From 13999940a045dc3a4496aa12e8867142d2377719 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 11:27:59 +0100 Subject: [PATCH 001/134] Updated environment lockfiles (#5337) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 26 +++++++++++++------------- requirements/locks/py311-linux-64.lock | 26 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 24 ++++++++++++------------ 3 files changed, 38 insertions(+), 38 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 6133f53225..83cf6efc0f 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 081cae6b15083563c7942b761f2295b86794b799d4c679a81f95c695c576e491 +# input_hash: 2b004b3b54cef3f1b8e174aef2273590c7e578f60de14562357ef83ec73063ce @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 @@ -53,13 +53,13 @@ https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-hd590300_3.conda#8f24d371ed9efb3f0b0de383fb81d51c +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 @@ -76,7 +76,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.ta https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-h3358134_0.conda#c164eb2e0df905571d68f40ae957522d +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e @@ -91,7 +91,7 @@ https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.c https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc @@ -147,7 +147,7 @@ https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.1-h409715c_0.conda#4b82f5c9fc26b31d0f9302773a657507 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 
https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 @@ -186,7 +186,7 @@ https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py310h2372a71_0.conda#1c510e74c87dc9b8fe1f7f9e8dbcef96 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.2-pyha770c72_0.conda#5a4a270e5a3f93846d6bade2f71fa440 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 @@ -201,8 +201,8 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.6-py310h2372a71_0.conda#93b5564452a94d4bc633ab692ef29598 
-https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.1-h409715c_0.conda#effaa9ea047f960bc70225be8337fd91 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py310h2372a71_0.conda#13df1c4ea94f2e3326b15da1999e5999 +https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.tar.bz2#fd18cd597d23b2b5ddde23bd5b7aec32 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py310h2372a71_0.conda#76426eaff204520e719207700359a855 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 @@ -217,12 +217,12 @@ https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py310h582fbeb_1.conda#cf62f6cff3536eafaaa0c740b0bf7465 https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h056c13c_1.conda#32d925cfd330e0cbb72b7618558a44e8 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py310hc6cd4ac_0.conda#a3217e1bff09702dfdfcb536825fc12d -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.2-hd8ed1ab_0.conda#f676553904bb8f7c1dfe71c9db0d9ba7 
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd @@ -230,7 +230,7 @@ https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda# https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py310he60537e_0.conda#68b2dd34c69d08b05a9db5e3596fe3ee -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.1-py310h7cbd5c2_1.conda#25fc16ee9a1df69e91c8213530f2cc8c +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py310h7cbd5c2_0.conda#e0b845c6b29a1ed2e409bef6c0f5d96b https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py310hb814896_1.conda#d44c6841ee904252e0e8b7a1c7b11383 @@ -255,7 +255,7 @@ https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 
-https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h98fae49_1.conda#1cad58e8dceb1af51dbd963bb7f53f34 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 7ab3e170f6..3fb33e82cd 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 735e824e95f2b2e689fb7433e592a1511d9a7959fe4b524373621b99ae41ee87 +# input_hash: 2f069ddfd9b505e06a2c4ed71dff8be24543629660bd8a39c1a41dde291bd352 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 @@ -53,13 +53,13 @@ https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-hd590300_3.conda#8f24d371ed9efb3f0b0de383fb81d51c +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 @@ -76,7 +76,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.ta https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-h3358134_0.conda#c164eb2e0df905571d68f40ae957522d +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e @@ -91,7 +91,7 @@ https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.c https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc @@ -147,7 +147,7 @@ https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.1-h409715c_0.conda#4b82f5c9fc26b31d0f9302773a657507 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 
https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 @@ -186,7 +186,7 @@ https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.conda#12b1c374ee90a1aa11ea921858394dc8 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.2-pyha770c72_0.conda#5a4a270e5a3f93846d6bade2f71fa440 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -200,8 +200,8 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py311ha3edf6b_0.conda#e7548e7f58965a2fe97a95950a5fedc6 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.6-py311h459d7ec_0.conda#959422baa360b4aaf505aedff7d77943 
-https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.1-h409715c_0.conda#effaa9ea047f960bc70225be8337fd91 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py311h459d7ec_0.conda#3c2c65575c28b23afc5e4ff721a2fc9f +https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.tar.bz2#21523141b35484b1edafba962c6ea883 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py311h459d7ec_0.conda#ddd2cd004e10bc7a1e042283326cbf91 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 @@ -216,12 +216,12 @@ https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py311h0b84326_1.conda#6be2190fdbf26a6c1d3356a54d955237 https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py311hb755f60_0.conda#2b5430f2f1651f460c852e1fdd549184 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.2-hd8ed1ab_0.conda#f676553904bb8f7c1dfe71c9db0d9ba7 
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd @@ -229,7 +229,7 @@ https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda# https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py311h8597a09_0.conda#70c3b734ffe82c16b6d121aaa11929a8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.1-py311h320fe9a_1.conda#37f841a3140999c4735f7d8091072bea +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py311h320fe9a_0.conda#509769b430266dc5c2f6a3eab0f23164 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py311h1850bce_1.conda#572159a946b809df471b11db4995c708 @@ -254,7 +254,7 @@ https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 
-https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h98fae49_1.conda#1cad58e8dceb1af51dbd963bb7f53f34 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 2467c06504..8e584a3a74 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: bd8a57fefa94205701a278eab5fbfd897a3c49e389f926b1880d718caa0d6195 +# input_hash: b3cb1f7bc6b32267d57b62a9f2f18ea72ba40a12bb0f57668771079837395d34 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 @@ -53,13 +53,13 @@ https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-hd590300_3.conda#8f24d371ed9efb3f0b0de383fb81d51c +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 @@ -76,7 +76,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.ta https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-h3358134_0.conda#c164eb2e0df905571d68f40ae957522d +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e @@ -91,7 +91,7 @@ https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.c https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc @@ -147,7 +147,7 @@ https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1. 
https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.1-h409715c_0.conda#4b82f5c9fc26b31d0f9302773a657507 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 @@ -186,7 +186,7 @@ https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py39hd1e30aa_0.conda#da334eecb1ea2248e28294c49e6f6d89 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.2-pyha770c72_0.conda#5a4a270e5a3f93846d6bade2f71fa440 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 @@ -201,7 +201,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951 -https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.1-h409715c_0.conda#effaa9ea047f960bc70225be8337fd91 +https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.tar.bz2#eb31327ace8dac15c2df243d9505a132 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py39hd1e30aa_0.conda#80605b792f58cf5c78a5b7e20cef1e35 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 @@ -217,12 +217,12 @@ https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py39haaeba84_1.conda#d7aa9b99ed6ade75fbab1e4cedcb3ce2 https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 
https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hf1c3bca_1.conda#ae6bfe65e81d9b59a71cc01a2858650f https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py39h3d6467e_0.conda#6d990f672cc70e5c480ddb74b789a17c -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.2-hd8ed1ab_0.conda#f676553904bb8f7c1dfe71c9db0d9ba7 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd @@ -230,7 +230,7 @@ https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda# https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.12.0-pyhd8ed1ab_0.conda#3544c818f0720c89eb16ae6940ab440b https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.1-py39h40cae4c_1.conda#85bc4d45cd98f84af0c00435fff23f67 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py39h40cae4c_0.conda#de99b3f807c0b295a7df94623df0fb4c https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py39h718ffca_1.conda#a19bf4be7ebce54623541fa4ad22abb4 @@ -254,7 +254,7 @@ https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.co 
https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h98fae49_1.conda#1cad58e8dceb1af51dbd963bb7f53f34 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 From 18d24a974996a0fca264d59e9723b6250533ad53 Mon Sep 17 00:00:00 2001 From: Henry Wright <84939917+HGWright@users.noreply.github.com> Date: Mon, 12 Jun 2023 11:16:09 +0100 Subject: [PATCH 002/134] Adding a Dask best practices section to the user guide (#5190) * Adding a Dask best practices section to the user guide * Updated example 2, adjusted internal MO references * making requested changes from review * fixing merge conflict and rest of requested changes * finishing requested changes? 
+* Protection against multiple software packages trying to manage parallelism
+  simultaneously
+ |-- 20180401 + | |-- gfs.t00z.icing.0p25.grb2f006 + | |-- gfs.t00z.icing.0p25.grb2f006.1 + | |-- gfs.t00z.icing.0p25.grb2f012 + | |-- gfs.t00z.icing.0p25.grb2f018 + | |-- gfs.t00z.icing.0p25.grb2f024 + | |-- gfs.t00z.icing.0p25.grb2f030 + | `-- gfs.t00z.icing.0p25.grb2f036 + |-- 20180402 + | `-- gfs.t00z.icing.0p25.grb2f006 + |-- 20180403 + | |-- gfs.t12z.icing.0p25.grb2f006 + | |-- gfs.t12z.icing.0p25.grb2f012 + +With this script, a sample of 11 GRIB files takes ~600secs to load:: + + import iris + import glob + + fpaths=glob.glob('20190416/*t18z*f???') + cubes = iris.load(fpaths, callback=callback) + + def callback(cube, field, fname): + if field.sections[5]['bitsPerValue'] == 0: + raise iris.exceptions.IgnoreCubeException + if field.sections[4]['parameterNumber'] == 20: + raise iris.exceptions.IgnoreCubeException + elif field.sections[4]['parameterNumber'] == 234: + cube.long_name = 'Icing Severity' + +3.2 Parallelisation +^^^^^^^^^^^^^^^^^^^ +We'll try using `dask.bag `_ to +parallelise the function calls. It's important that Dask is given the freedom +to break the task down in an efficient manner - the function that is mapped +across the bag should only load a single file, and the bag itself can +iterate through the list of files. Here's the restructured script:: + + import glob + import multiprocessing + import os + + import dask + import dask.bag as db + import iris + + def callback(cube, field, fname): + if field.sections[5]['bitsPerValue'] == 0: + raise iris.exceptions.IgnoreCubeException + if field.sections[4]['parameterNumber'] == 20: + raise iris.exceptions.IgnoreCubeException + elif field.sections[4]['parameterNumber'] == 234: + cube.long_name = 'Icing Severity' + + def func(fname): + return iris.load_cube(fname, callback=callback) + + fpaths = list(glob.glob('20190416/*t18z*f???')) + + # Determine the number of processors visible .. + cpu_count = multiprocessing.cpu_count() + + # .. or as given by slurm allocation. 
+ # Only relevant when using Slurm for job scheduling + if 'SLURM_NTASKS' in os.environ: + cpu_count = os.environ['SLURM_NTASKS'] + + # Do not exceed the number of CPUs available, leaving 1 for the system. + num_workers = cpu_count - 1 + print('Using {} workers from {} CPUs...'.format(num_workers, cpu_count)) + + # Now do the parallel load. + with dask.config.set(num_workers=num_workers): + bag = db.from_sequence(fpaths).map(func) + cubes = iris.cube.CubeList(bag.compute()).merge() + +This achieves approximately a 10-fold improvement if enough CPUs are +available to have one per file. See this benchmarking: + ++---------------+-----------------------+---------------+---------------+ +| Machine | CPUs Available | CPUs Used | Time Taken | ++===============+=======================+===============+===============+ +| A | 4 | 3 | 4m 05s | +| | +---------------+---------------+ +| | | 4 | 3m 22s | ++---------------+-----------------------+---------------+---------------+ +| B | 8 | 1 | 9m 10s | +| | +---------------+---------------+ +| | | 7 | 2m 35s | +| | +---------------+---------------+ +| | | 8 | 2m 20s | ++---------------+-----------------------+---------------+---------------+ + + +.. _examples_bags_greed_profile: + +3.3 Profiling +^^^^^^^^^^^^^ +1m 10s is still a surprisingly long time. When faced with a mystery like +this it is helpful to profile the script to see if there are any steps that +are taking more time than we would expect. For this we use a tool called +`kapture `_ to produce a +flame chart visualising the time spent performing each call: + +.. image:: images/grib-bottleneck.png + :width: 1000 + :align: center + +From this we see that 96% of the runtime is taken by this call:: + + res = gribapi.grib_get_array(self._message_id, key) + +This is the call being used during the ``callback`` function when it uses +GRIB messages to filter out cubes with certain unwanted properties. 
+ +3.4 Improving GRIB Key Handling +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Even with parallelisation, we are still limited by the time it takes to run +a single instance of a function. This is going to become much more important +when running 7000 files instead of 11, since there will be nowhere near +enough CPUs even on a large multi-processing system, meaning each CPU will be running many instances +of the function. **Parallelisation can only go so far to solving speed issues** -- +it's effectively the 'brute force' method. + +:ref:`examples_bags_greed_profile` showed us where the major bottleneck is. To improve efficiency +we can re-write the script to filter on GRIB messages *before* converting +the GRIB file to a cube:: + + import dask + import dask.bag as db + import glob + import iris + import multiprocessing + import os + + def func(fname): + import iris + from iris_grib import load_pairs_from_fields + from iris_grib.message import GribMessage # perform GRIB message level filtering... + filtered_messages = [] + for message in GribMessage.messages_from_filename(fname): + if (message.sections[5]['bitsPerValue'] != 0 and + message.sections[4]['parameterNumber'] == 234): + filtered_messages.append(message) # now convert the messages to cubes... 
+First, NumPy must be prevented from performing its own multi-processing by
+adding the following **before** ``import numpy`` is called. You can read more
+about this in :ref:`numpy_threads`.
+ +:: + + import os + + os.environ["OMP_NUM_THREADS"] = "1" + os.environ["OPENBLAS_NUM_THREADS"] = "1" + os.environ["MKL_NUM_THREADS"] = "1" + os.environ["VECLIB_MAXIMUM_THREADS"] = "1" + os.environ["NUMEXPR_NUM_THREADS"] = "1" + +Lastly, if you are using SLURM on the computing cluster then SLURM must be configured to prevent it +optimising the number of cores necessary for the job. See the SLURM commands +below, to be added before running the python script. It's important that +``ntasks`` matches the number of CPUs specified in the python script. You +can read more about these points in :ref:`multi-pro_slurm`. + +:: + + #SBATCH --ntasks=12 + #SBATCH --ntasks-per-core=1 + +This has all been based on a real example. Once all the above had been set +up correctly, the completion time had dropped from an estimated **55 days** +to **less than 1 day**. + +3.6 Lessons +^^^^^^^^^^^ +* Dask isn't a magic switch - it's important to write your script so that + there is a way to create small sub-tasks. In this case by providing + dask.bag with the file list and the function separated +* Parallelism is not the only performance improvement to try - the script + will still be slow if the individual function is slow +* All multi-processing needs to be managed by Dask. Several other factors + may introduce multi-processing and these need to be configured not to diff --git a/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst b/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst new file mode 100644 index 0000000000..836503314c --- /dev/null +++ b/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst @@ -0,0 +1,169 @@ +.. _examples_parallel_loop: + +2. 
Parallelising a Loop of Multiple Calls to a Third Party Library +------------------------------------------------------------------ + +Whilst Iris does provide extensive functionality for performing statistical and +mathematical operations on your data, it is sometimes necessary to use a third +party library. + +The following example describes a real world use case of how to parallelise +multiple calls to a third party library using dask bags. + +2.1 The Problem - Parallelising a Loop +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +In this particular example, the user is calculating a sounding parcel for each +column in their dataset. The cubes that are used are of shape:: + + (model_level_number: 20; grid_latitude: 1536; grid_longitude: 1536) + +As a sounding is calculated for each column, this means there are 1536x1536 +individual calculations. + +In Python, it is common practice to vectorize the calculation of for loops. +Vectorising is done by using NumPy to operate on the whole array at once rather +than a single element at a time. Unfortunately, not all operations are +vectorisable, including the calculation in this example, and so we look to +other methods to improve the performance. 
+    output_arrays = [np.zeros(pressure.shape[1:]) for _ in range(6)]
+    cape, cin, lcl, lfc, el, tpw = output_arrays
+ +2.4 Parallelising with Dask Bags +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Dask bags are collections of Python objects that you can map a computation over +in a parallel manner. + +For more information about dask bags, see the `Dask Bag Documentation +`_. + +Dask bags work best with lightweight objects, so we will create a collection of +indices into our data arrays. + +First, we put the loop into a function that takes a slice object to index the +appropriate section of the array.:: + + def calculate_sounding(y_slice): + for y in range(y_slice.stop-y_slice.start): + for x in range(nlim): + mydata = {'pres': pressure[:, y_slice][:, y, x], + 'temp': temperature[:, y_slice][:, y, x], + 'dwpt': dewpoint[:, y_slice][:, y, x], + 'hght': height[:, y_slice][:, y, x]} + + # Calculate the sounding with the selected column of data. + S = sk.Sounding(soundingdata=mydata) + try: + startp, startt, startdp, type_ = S.get_parcel(parcel_def) + P_lcl, P_lfc, P_el, CAPE, CIN = S.get_cape( + startp, startt, startdp, totalcape=total_cape) + TPW = S.precipitable_water() + except: + P_lcl, P_lfc, P_el, CAPE, CIN, TPW = [ + np.ma.masked for _ in range(6)] + + # Fill the output arrays with the results + cape[:, y_slice][y,x] = CAPE + cin[:, y_slice][y,x] = CIN + lcl[:, y_slice][y,x] = P_lcl + lfc[:, y_slice][y,x] = P_lfc + el[:, y_slice][y,x] = P_el + tpw[:, y_slice][y,x] = TPW + +Then we create a dask bag of slice objects that will create multiple partitions +along the y axis.:: + + num_of_workers = 4 + len_of_y_axis = pressure.shape[1] + + part_loc = [int(loc) for loc in np.floor(np.linspace(0, len_of_y_axis, + num_of_workers + 1))] + + dask_bag = db.from_sequence( + [slice(part_loc[i], part_loc[i+1]) for i in range(num_of_workers)]) + + with dask.config.set(scheduler='processes'): + dask_bag.map(calculate_sounding).compute() + +When this was run on a machine with 4 workers, a speedup of ~4x was achieved, +as expected. 
+Note that if using the processes scheduler there is some extra time spent
+serialising the data to pass it between workers. For more information on the
+different schedulers available in Dask, see `Dask Scheduler Overview
+Doing so, we find that the chunks currently have the shape::
+
+    (1, 1, 144, 192)
+    lazy_data = cube.lazy_data()
+    lazy_data = lazy_data.rechunk((1, 85, 144, 192))
+    cube.data = lazy_data
z!u}$IE%58Sc*e(DO4wzd$dUcM9X9z6!e6_}Za$EwT_xtPxNvD;?M`ZNFzX5A>!>$b zS+>9S5EWNbQT38e(8-_e*EbFf%?=SD|8k5!O}$3l)7O(Kah6CMYb$xdeNUDSE1)v% z_d1yM*IQ6;uKp8hSoTMwKOdvfse31gaQQ5nV`2RHl%1KGkpsnb^{-_!eqDK(qWst2 zzy9-9a&fZt@0b6tvH!={nwkeizasvh>)odO?Bna3WBbn|{`#6Hr=HEiAeD6YpF5%? zj&RR(XMXt~Z+qpuS#CT{^Vj>5jY!By%%>^;`jSa`ecQ3WUbh|p@8ZPGzI;;e&-cHb zHhbmq&t@ZEylkU~Z~WJHX;1zW&A+~MIq>p7Tl(vjr%a`oSmfcq>)=y@CAPM6m7ixR_8!x@P`4 zDk}47;28h;^Y&DadXcAEMSMeufu&LA?$!W{cD;>unY)NWs{-TCk%!ZY?7pqW$_u5` zCKZ*{m^gli1=ISkOYG#QNi$}B0_o*V*881$OWhIyPibRYuk-WsmrTs1Z?EA}q!az? z!+8SY8(gO$ZSFwcuxNW%f32>bUVIk|2`9OHOXK22vEUA=W^5fOJtS?^_hSMlaTN{Vk=;&}S&kLt$1`NLT^XvS;CYG!lr9+xLid&lr zh;nn@P%YAJFy>5gC<~AsvBYLX@SD_swIU^Yx*+Tr-QrEyjXgFb=wM*2DlYDaN1kzs z+CJB@i7i{zybs;;(<_+us$|>fq~6pbN;Zt0rDyW|(&G2sTSGX%*XP^PexU*Kkc-P% zlFY%q4#RI#RFl&_OkK|&^$3>?Of-dO#l%fb8QX1rPFx;uNHkSRRWD$-DYiez$QW>8 z;O17vtoD2!f$YY0=jT_O*1sxaoZ3@3I69I^+Z4*9d;IyGKzjcMPex*g$!01puO_!@30M#rUzKPa32eX#Hedie;|_1PY-i8l(pn|+oKvh7PWJhBsH zSMB|9m^c>Ae3S8(vLyYN_S*()54eomWP}fLw1RdAC8}Qk=X`*^TyJHGPE}3*IJHz( z*ABb<^`V9s~W$Dd2#PoT7N4O)E#cg% znz8XSRDo)fL+ASQE(|Rb6I1%`7Fn)&`-6dEJB1Q&+0Gwj$FBS)nFoh`67+AjDkPjQ zlc!GXnqbi!SHQk_?HJsVH^(nC=k?l|A=QTUR^eFJ{Ln^RU9RHQFNiejTcJyf>}@fdz5jp_eiqbP7v1Bbg@=gnYwbI^L90JRE>>4>~AwEBky1T zScr-H_Ufc$fJnnjBI< zDtRa?zfW-E;1DtO3u$4AdQHmm>67T2Z#6Z1cgjkhKZn(^ z3?3JFc$bH;>gL%>{=1^f5kF|x-`CUk30-LNSEAwLdvHW(OSnn%^L@yzBSQ0uAAXR_ zMy&g76J3lGJ`kXf?dh*Mr^}RkO)fH}Mch~F`z7jq(xvOn(w*xxm%Q+QEh}H=s&&~F58Ge*YF#UeTVai1--DAGnikQ{*`Y0~*C0`ktV}>8A+W1mwoc}Bj@9g45vD5k$-{Kroj15HbP~EdGrV_H zL@zeZ-HrX)QMm>e15V1$L|qTJeB&?Pr!U1_d46I~-*an8I8)GJ(1B9eD$AdWKSPqF zCLVp#b?STI!Zn7dT}Zskns&pJ9v#yqxU{^5KPhKb zH_e$?@pdubNGbVV5UE?4GUnVlThX>rEBWqW%~GuOWOF<|8C&T^U8R2`=Iw+TF}mMl z_;ZRfg5~0dwRKCAQIj3mwG&0Q(^<=7R-GI8ljy3NAzOFhhEL15jKgW`lwpS0lr{)KW zTd0g=XnSr_`cYDr5%WI3JDjsDm?+0@J|AC^0FiHfmf<;+5#(;WUlx6sPQj7Alep7g7O@B{4xk zL1t#edt8{9NBa7mxi0eQn^y7e8YN<7X<85Qo<^PDZ;lX@?&|8Ywrx!LS^Y*MW%Zm% zTT+bszCrGMf1!St`<{o#=0h|())|m#ty`L&w_EQ9?Kb}}A&TtXV?v*6JSsAMLgCc( 
zoe52v9WgdM10^nM@vg3%md(t^e0T58@a&sXu5=ch;&a>f?Ml~)m(wnqSqFuL_K;uNTDtQ2c}a=TY!@8? zlQi|gX%9M+<&Nnv0j?45>ZM@jojcOOdy9?xtAtOZ2t}@5y&CAM15m=C97M}s+#&41 zFF4zut4Tn)6K={ye)<8ghkqES#p5qwk%VxegQw3;J8}x$H45)SVRW{DbBN%}$(9&M zR!CDc}k*=F!>oOp)Z|8!`vJ=HC+czFY&&-B~pv%q2$N{Z|_&8GD1raEMenj>Xz z*f$le6K;WDBo^Q@ANv!ln zj&~1nhYRv-J0a(#ng|zLBB5V%*kq9mh%(yMNe`P=GYp#OV2A+q7EtIAgu9Qiy3FL9-NzLjc(DiZlnr=koqpz2s zW#4S2mZ``tbSInVSqrdmrs>Lerl_hq_a6~1!^_IbQqo z(oJrbrKd-by79;ZhIDx!c$I7O*(q-jHL0-$?6w%EYnI)?bR<1=BiNfgXxvWp;E={A zb7Pxcn$*3)cE78Od)k@^Z;UNEYyq1-#BKSG)=lWq`xNDj$9e3VQSMrWQo`xGJ*%bW z;i}X7Yfimtv1J|KD_T4M8_QT|#zE-b&U6Y&%!f8|ACM8{8+jI2f z&d^L>L%Rp3r<)hOEW;pb@|cZLs(uI&3Bl97!K9@4H)M>`q%Vz{9~v2jTxl^gzh4%a zptL0+=Az!mZ6?d zAGE7H?Z>m~{=6pPOaYiGVH_@o3BBu1O}%z}HtT7OYRiGKv7YIceZu?=vDUZ?Wm*s%1iW~=Hdk>$| z=9-PLojBp*#y&iHdTV#QJ{SpS^yaq%3CvxQs*WP_6G4m1ZQ-3Mj7H5D@d!&(So-o9 zI5`_<`^&1Ta!0y{I{M2@in>FI<6U*yBenZj?Dq-^3MReFFk>SM>NDNDX&VQZvOekA)Y9(XUQZ9TQbk4|EdgC7#7S`-JPDhDSOIEzqI@*EjiqB3_ zNvDzQEkx%}H0!=Kv`K{UNG}a1`(S($3x-IpX}g>-vj#J)0AFR(_3XQMrh{32_Vc|F z(F-bs9-}=yJ#;^MZzb!jtaqUY2TinESJn;+qCS0!<#L|we*MPfB6iPd{^P!Y!+han z_1CXcRhLTW>ONgr#Brf>56T0DX&0Gww5WRXbJYcx($do2m)&-6-|_G+TM`h!L$_p@ z1NqQQ!2cueBk5aD#MB5lf0(ku2#$_mO2-?LO-8*jaXT9eCiA=ZhL=nvxw7WVze1*?$TjrDg>Eq9F9q#f3xX#XU`4ddbBUSWQKDQcwIkUOqsesUy z=@MW7d^tPiG`Rvxn+{bTVy>(des~}Gb(uogR1C^^i1)(6^N#0_ZHPE)^=)=6Na!oN z>rzJT>T?QhSvg6%%l#XSw|hbC9_D>A1q3N0Z7K-}lH?jzz2bxK&0sx!Bw>$q#@MKV z#H&cNY{;A2witQj_Jc(~s&$(Nzf^OzanYfo|8~!hUc*g6*@YQo$NlOpak$)6sGeN8 zY&u!W4Z&<7;tz@D`m&W&RaK|8>Q+=zRaHD@2V(?1W;^{OifxxW7)0;gV|19;VjLfh zbzbOJSb9`VE4j5Ysg}w;7x|)Ga7O|Y7H@O1KSoK-e5kNtb-&3}MW%(;AAArl$EhbQ z8tYXylg^1^c1=Ok0l*r;m-1xfF6o}z5{-#QUwaup3BTOT%D;;*Pi0tQ ziDpuhTiqP`xn%#{hM3@~w$?`$<<9KFf&yCOH$)z~ zkS>+17a`SELy0QMT9PV^YR>Z?^3lrOS#)oRUzE?!&$9{eaA8ZN85#NNSNFLrEhBiH z7%K`MRliP=GAKJbC(opjbuX-&jP3f=Yr~e%5Q`O$AGtO~O%bXzG*`J?ngyr?Y}GM2 zW?OjU^+7X*lP|mk-J4hUkDNfhCn_XUk@3q5MAk7J^v)QyyJ|W#mbU9RIXlY?cDUa8 zkvHV0kKf`iyLbrw=*r^DfubbQ6+_Yt=9plXFyKW;!Z<9S#x+v!O9hmvkv2!g+EW!9 
zSJmh?ZBur5=&nq-qGRe6C+8?TbIqn7<(dw3ojbg+rEghuy!`nIH%#^^0>ubew3|67{KV3?4RS95YQCA+&-xk)-_?%bO zi<))w%tATCnFQSl!Dp6>5Q_aUHKp97ua9Pm>c$$}Aa6@z|2Kvf*>Cpm?UV`N5u02a zD?4zz=<4;-@CR4yr%#=+^140hea1B%l9{fXZ`7jb+<%`Minh*sm2CRT)v`>pIWsPY zb>=r(7aT8@lKpEc-WJRL*UtTQC(oS!Zl1juA#C|CTd_h+Y+3i;heDzLmqYnKyxRYp zIs3xTSK!#M7S=!4V?T1?=iisUXtoYU(EspH7l-FrNDuwp?bEwK>2&h9`vQ{UxN0!# zpC?4U<^I3fq|g2_!~gRHr^1cwj-EKd#LUcm6%Bh-E`7~mzMoM!>1}+&e=hmx*I_wI>x!?SpXyo!yDZT@&m? zluHjchH{wp<)v3vR{r~3o@!q?@;!rTEiEmB*mQ+rP5(U|xMke4pm_YhRukuU?pV3- z{$}jy<02`n{fY9_okLkH2dOv z9};@#a2fAzE+yo7dZH*!dmm+^rlxNHU^Ce~y|*zG^5)GOgu=;1@J$bvxGswnD3a!T#Ls+$C+-xQq72eT^j*X7SuFZ4{+xXvke%xe#cdN*C%Q`FL zw_EQSqW1c*o{Ni1VPRoBt`fN!90{_RCstPPva_?}8_Ewp3OFg^lQK&z?rqf3M_s3- z{W?27K2GQ1;UTFgBjZcSZFU`;(7mNv>IY@+CF=`AnK~uT@k#y+N(o6KFA1~q^0?8K z-5C+jol%Sfcsx;X~8bPhQCfBULY*HwGNmHikUVumgPVy9LLIE`8LCoV;a+ zdvDs6mW+3v(RirTZ5uWXIX20*ASN|V`&o6C2RRZX*JSRuL<&O3HE2q}X?aW`{Xs4# zx-0Em|DSNc`eM$b76wcD%Kbe}8KShi=$KI!RsEC9mmS zBlAOoZrir!nN(#62?-gs3O;)iQiY`vohKztfM98;01_${#GX#**1JB}2XK>v?q*d< zRuqMIFE1{B2e_h@)GoH?>Xd$DZTRmjKbwNho_y1msfqVB}y$(E`nB2XK!aR%ENT_#}_2Mp(vg|b0(VFee+7s+6yl) zla+}k(;uH>GORIm+5POQ`n(@Z`0H4cx8m$S$9v%TfXN&Mb z8ly@|l5y)N(~hKf(8miKn+dqUBiEUt(jF%n2yF|KAZCp=-us$LRxM|tcg(@6)@y5w3x7}|~3pWg#; z2)ps{nPKCHrN%GR9=l(*Hx@FL66H)9Kitg9a(0!zCigK3Zq=Elu@XATs$HaI@c-~v$l26HFkp@1$(m{?%`)a$qu*uf)IG8>; zrTg3H?9K$pvlR6&pSsdD6WbHzQwjLRJV?<7LrmD#Ub{mfFZ;le3XI8&YLN6z?Ea=?(`C*lEywv%U zt%-29ix$uaXK#QA!`EsV-Ig=X_4>F(3bxGyRLhx1Z8z7efGm-!6{TtQ;&fs)hI13#l(Th8YalLZsNARssN!*CkaW&k!#cvsSRBr`Oj^uCh)fH zsn`HoN$AkWfF;shlE2i1|WNEWF$s0UMl&X4>2Agp(-E@r)kG^Bmg1% zLTJykJWx(mO>f)XS{a|5q@P~e!wr@cOTE8-W!eh0nG+p-`8=zZx@E~)Fm34JR!4Vt z_k==PlW%XYAVJD)-Wx|GynDZFe>)W*BYC<#5iwBU&Rt;hH%^e>vW^q;znEO(cR||u zVmTY2s&p`OD}+~|Et}5cT7RnT>BJ~oNN+O#5|4vYh*QJ7+%F*^AU#`?)q7Hw2XSDh8iGV5)HJY{!IiHmnB;{JOgPMQaMRh=8G2~ zv>q{PWX18>PDPiO-ySG-V5^(dEp^rS^5qMoc2Q1pQ*AAM)6DzEDpfKvvhkKET{1Qu zndPmeR1vC+7aRS-@}!fj4WRCR&!7Ya`LXlf?n;YcDi|i{L2D?ufq{V*14Z$mU!^h4QNqv` 
zUI`b>+?w8TS{M{OcI=o*x#uB6yGuf@ycH>JurJ??ITww&-Odm(eNaz zl}pkgtU_pm2@N z&Zd35WrJ*swXH3u_ElC^IcRg@oj$D$pwbyHO~z%?b`R={-16G!&!5UMcaITJa!LT= zw{I?u23-{=+yex{07hR+h9k$r$8QJTcAktZ@i+lx5+v#CH*Yw?nf7hIdbIpT=Hbp@ zi$hNoRZj`eU;$W+M=HIL13rBCFkq%iJ87v2OT68+8e>kOm7Z^P5%fB@Z+rS*5;S#= z5ORC4GTFkj=CQZd&1XOR0cct#q)Vz7+3*_iX`Zg1zo=G`VN91n-8jKDqHC$&3T6rA6P36XqM^s=%)92I+I$)6eJMTFO?GKHCjoKnRO}jK!YJ> zRX@39mhBhGMG7Q;|9;=g+Zz`sFVqULFVp=kf)-hKnpyPtFHUE&syHV zq@j7+=o~;WWQR1=9>pqe0)UQ0SWsBrE%!(o(bwNVI79HN!G_aG(#uE6gz!bV+t0!d z+Ktu_WY1;LO#?S8a#~5y($d0Eal;)S!2hLgObR=>xBxlV1ju^_;f&N4K<3bGZ7r=1 ztdk9w<)D`68`4gQ15TR>8OgTh2!0@2^zG|SJeGqULmmg_eBJHsO5K^d$ithQUxY#< z%Vd>p(2xLYM!1D}U!I#q1VafKqy?YNgs(YuAByAdf2^a49s8jEIns z+q`-g7+i)V204ZPyq@fg7`!I&KJ+DmitcM)%l24yL$t(aXXMj^l*DcdQ z_>&(c7sBKhN_XU%o0~y0-N#G>5b?h}P4UTfYneenK%gR}mdpqEpkVeI(1+K)zMP%Y zIGm~T`V1TnmqGnGgvNpdQ5eKJsTA9rXBV!|52WS~xoch(^Oacdss#q47QtsXHa;Hj zO>hARXVF_=75n-e!zb8IMn*;{=y9sOA!R{I&{c8&DHxtGd^iY*_MTq;($@AiBRjhy z5bB#V8>_1wVO;33g@sJu4g{A3ewew8_U57+pqfH3cP)-orV zhEQ5DKg#U|kAqaCW-gR$L;}Ev;#?kDT4n-E3(JLVtncjXJhxnn4@d*-K>5p~>Z2=W zKR(})laoVqBvQgaOBk7gMUr==N?iGL<-YDtAvbh3A5VYu`y#%M7alOn-nB zc7#mBme0TsNZEhb6j;En_y;pmG#ELd?3lx=>Ul#v4NN?`fNu zf|d^xbjgDhGJ@eOFGLOw3}}OT$g^c2C84Bb1c^i?;CTPirAxuA^rFaZfx%CI^&v@6 znM0ujShDwcuKaVSMaxA+p(DJx_VfM8P&<;KSQS}~dMC(+uQ#g1IDUa@3^}1fC-we2 zoc}72TT>vH!K@9Rw6wJWU8Fa!_>(Po0FrXF)%hnVa7LP6#!g3D0-o6 z0x~^b`u7IirgVwV^CTo90_@)8*qNK1C3YiZgl}UO4E8MV{^xxy>r&~cW&maDiAIy@ z--aAmqVbOnrhj*`;oT-zs}DUXbKe71zB(Ep9M%hudmVhzVbI1UM<-zxHx+m?^hSYB zNSjdneh!r9fusTo3w8oXIDtDmfLP~$ltIZ`}VFLFiVP+=+Dnd z+{-#Lytw#e4c96C^y$-6+aDt$5c%vpH&Cnw%nz|8dI|(x){WsElzg_v^-SVnH^@($ z4waU`=C6&1=%#=swYIbCgo-b#zrVj@iHI}sFO@U23Of>HFJ!CqK?y;`3?3=-bJV3A9)oZjDx`tgE@(Gh%m(2N!Twa;9Xl7 z(gh_EQe0e|ARomFGuu$1!)uNnJC+PO0vX3Z#@)a{Vxpm;p_>0QIJsyoWsm9y4Pv)2 zs2v#@`Hw^c_-LrBtLv~o*mtv={hV+JwN`A5U!O9MFsui^TX}MDx%Ip8zb3B zfu4>bkjcPWOd(f$zv#cT7%EK%>mz&{N^A1cXf1l8F%%K-rF-j|*_lpY!SC+vfg?f} 
z0j?1O3y32?MrK3_&0ZF_rD`%htDi5B+;H1jM-+F$KF}^Gb+RqXA^N8x9e{FT{pjG`bfcIkQrAckrlZg>^Z;!>)~D)z-z~?&CMy;CuHzFjT`p?X_bI^(MduvzQ4Dhsnv!e|(b)?}^r?l&dz_b-_X8C_FYk3I))?E} zO)Kd2T=&@DJ=I)nJBxwdv|PpKC-^}ueI!Au*X)R&OlMU2B4}Wab429H%B3y+q|)+=Id8u zP;B5p1ZwO6IkVeZHW>f*P6P}!XJ8AolE?7QA-YC!N8_zGSpL&LfN;PjbGbwad+0!L z1^&{Gkz>&^f~#wTL8x zGJ$A}-QC>|up|UsH@}QK(mO7@XWtK@Z-W2-yZsKw_ry+wDbRaS&Dfw{FoZ9(|P+FG|R69g8L!|t0S zCs@GR!XOz7?VMz=D`BRj4a|ni^78UZn-?x#Orzj1Oah!jTvoWO-Nw9j5a@`u(M%5* zJpmYv2Z02P!u$c)of=e-FYRE=DVI3gA;k#zp5Y#hPIraqx+jD008;lhU^}1VVzm8S zZ#>ZYFh6sMR{Pl>5A*8()P`@TA>M2iq1ek_;IU8!w$bivs{Rd>-1yua3)t2{j4D!a zE-GLU$nJp^6vkoX51u2fu&^+=0P;|gIoJ64_z<_jv^OUioR%0^#55>975GtFNrU?L ztz&h8T+XY;$fbaab$yQ!X)>MaN)P9Am`{UzO#vOB&^ZproZ#UB0`3+sZxT5AjJl=R zngHsSdh6erzMD$!>%;t9z;*L8oHy8hSAd`l65AbWv0EIzqXOelpp@i4AEtuF2El&# ze`XaN*Jz#ajmt5Jr31~9RM-$GlH0S{Ey}TC1b+wtYM8;K^cd&cOVCiy0UD-~p``(i zo?4z+&nh_TnnldNA7nVb5>F3HygXJH3*}F}=+7YhaC{A(-tYIJzWrZ^=xP4X1zoV6 zoQNpk=pJCGeCNwQ7evL~Kn>=Y1o~48M@G8=nMG{a5~!v$Mn*xF#m}X z)Y}h_Gg0$B*@;CC3#{_;EoA-#AV46>$HC0$5_(v9bi6r26S(;IXUFg{nt!pD%V+Bz z9Kss6(jb-$e0GfOq6?swc~6#TvHcu7n)-&GXMG4;CZLun5N(8LB3>etq%oD>b|eaQ zBXA70iR(6X_v55JRz3#rZ9qQi@#FRKZtOpd`@9(~wB14_# zZEB#WVDT=mtp$OD4SqlE|1V%|$Dd>YVGR3;Y0UcVyr57A@5j!UBYFpnwx(WeNeLHJ zKX~1k<_4LFSgIX>^kmIoXlXz`(&qk&2vj)-?JbnOwY4HFpCkv^32@I}oZ|sG&#Q0Q{kx`yjp~ zG?(F6P2d8*a3bXI=jTht^Yjzbf0lRn7#^5B*IJhV=DUH!DB1j;ONRa-hq}h^krxP_ z+r9t&sgM8B&OEI5C&2z^q)S+#`ZINfDNY~X+Z+u@$OHEmx_q>bkWnq1U{WDn5XJ{N zNwafvkhnKN;xXDq9Zn{OjtIO}3ImEh$8E!0<+xgPab;u8U$hj z5zz}SJiT!gyTR)ke=4*yHE<#@Jhk`t_Y?BYUKN`Pw=8Xinum}rgulRq8r|=qNmxT9 zX7-mK0aBq2#0s?R47!jh1jwoO9oQ%)_*4Wm(caPuIxgNuI%#@((Y-k)S0!Nu#-_Nx zPH0y?E8p|~kSVm1d{2fwfqXvZq#TECBjid7P`ho=d4amO{%mErOqMF@|3uYf%&%Oz zf)o!Z#fZQ&XbMvSQ$h75Ayp0_+n8Da=h-u7^!Nzz@T9&kjEr1~ZT+A(+aBp|@nl#1 z&pjxZ>JBZ!S#UiWz6~CdEKJ$5T@;2{Du|$D;57+(z^JRMt4$V%D^j$J^JrnT|BLg% z-iF=Ix;c!M#0_8lb1_uhW3xkU0LNZlUUl8wY7mpwPoBJ`g*dKR6&P{^I)qpPe0I3i zgV_NmEx+jl;l*wS-1~MNJ}=VOZI4cBB)$#B*!6n*v|W}CeoW#0KH#Q4=Sh;->$EZi 
z8nwQ3eLoZ*yw4R4&I&qv^+CoEwfn%3#iVkgY$G-*pw>yCzuRu0+g^7=yQtZ20Q{l9K?@3oljq5u5^aZcZl-uPeJwk!Sr=U2B{PU7J`i;p+3 zQ5!8`WML77F3A$oi+03m4MkO(?3|o?AU*DF?i7e8MTOVwxR$CQWWaW= z7h2ut7fQ_j+8>Gbl#Qc{(4vK_pCKcw^-_u8OxcDZ)8iOV_#qaqfCr3trt-3;iOQMy&t16Snf~5 z!YDzsp-|vYk5o@iPFD5xT>vI74szx~7xCR!FQ7tI)Yc|8PbMcP-(e;dk(5k6Wvb!; zZs_-p4zqbv*N*q^-(Mzu6O)jzu2Zk1rsiR3@NUB<;5rkNh`4y7WW(ltV-u5-kf7zJ zCb%|S7d>zKcPdh9M9_QUv(530YZLxbs73GZQmK zfp7;ot$SNmmcZ21l$_7@I+zA0hUe;aVe_FIJpyg{2~{_7QBkYO$%%=|y1G*^PyD^D ztrCjH()Jo!hXRuAjL$t@mvv@nxgL(Wf0BcP;~W`Tcj_K!vuDAPF3H$E8Q$ru#SOgVtS3(G{Virym8{ zFPrCOBPE3gHRuG~LQqif=<(yvZq`(*WnBNrV|l5gqXPjCXy-34|0uF!f++-ih_*)X z1RnKu0&}w(745~VtLH^Mv9THTx3RG~B%#Z-IWJWUQ~8ge&3OjT>vLAtT~pJPd$+j! z-qc%zr4y5pL8zjlvO3pCMnOTrzA^q1K9WQU?EWR}FWhk$hkFfMZnGpNCU)lJ(E^)E z!inaHhP4gQQH7jP4Tmd-v~OuT3Hti^9YTTR8rBxMy^X7Pa3CshTDb;UgCBGM6^slG z!=@9PgGcWbA5RByqxbOPQ(cd}AhIY7<}i5u7C$>GDh>frT*KujV%9@a`zyCyKd^&5UD|hYJujS?C zs0R-oD5^!w50-?x*hAx84>%a=@ddIh^<2~QfQ->;X{VuDY;JB2!Isyt z-V@~qeofGtM4@bLZE3gTve;6b9jf6&)MR?*<^-^wDW1s)6RjfKQyLl?c0wV4<-qbt%F5A2aOH+) zX7H7i+8?xgzkFHM)Rf+K!!&m=drj*b^t+p`jP-SlyF{1d+3k`07V`F=O5(>NOf6Qm>O6^4Rj+5 zJBL&n$OWA_VV(w-$9kEYn-r!As=)50#c8RkdV{kKO!It!^BN=gRWqIIFj=JsuJYze zi*Rc$S`B4MALMJ-uI2%Z6ql^mj^q_pFcCykGfC%^gVDDbW4 zpf+Go<{|h9Ke7z)*#=;AEIK2@AK2&dN^}~ZgmW%;?xoCPDO|X-_0baDQj%S5;7mvX7oa0iiA3o}X zkQ*QLVy(c}(JFPNfVS&B6O-gdyUqI`LeBsQ-%(T~hVHc&++=Bf?d#WjW0RAyK&Vch zK0UTw44qu9FfatJz;**EBHqeJcN0Q)pul#T7&ZLu-Bm35I=K1Uco5|ha&k3*HeZ1I z>$u*%dl;n64e-7mKYpx-5Cu3?RvqlOcnW@dW(dhAprSqsx{yL6YIit8!`Yb+25DYF z3}3x@^DGgy&{uG6oJ>rM#W5Jn4H(k`9Qg^G@XE*MsXFt-&s2+z`QhPm?SPcPCWxSK zEiK}DdV0BbEDQ`45F=L5%LCu^3NJ7D^z^jWE;!^%(C>H%HiLw$Y&E#9g+47clw{y# zsOOncw6(WeLCuBTd$+-HH+WLi%#0bTV=J^WeRi1~Zf?!x55X^lp`d{KWVYx1fQHo5 zH*fI4(C~i${u~mfAO?TN`@DXA_x}AeV2gp}tm5e8RMFEz0elpN;6F_7M}v4pq7d|f zH`ut{Guov9NyY<3TyO#^lQyURqAS!lu8G3Y94UwOJC1;Dq7PC3*ucQRw?rFP3!~5R z1{F{hLT_KzgZ5VtWed4JG&;;GU!3Z!2~2Q+^G=A4E{Bdn4Y~d-*da1Pg; zr$<7|%7h&dK?SLIc%GM*_JvRNBUM1ROg8jxl?1l 
zeUs=)QDF*gn;$BRI(780s-j{gJ`X47V{nYpmbBoZCSWd-ORm>0`Tg6s<*3ilV7Gaw z5=8|><20(Fv2pAtG|4~2#9VPOv$Sl1(pcNj-PGgH_C} zzCdV0LQCr+bm}GrtKyUAbuvxC@Wi@jC~uj;r#CN=K|QVkC#RCi1DbJW{RLKB6T0mM zNemf$fKdj{&d$UmTzm71b+9s1VUBBVzN#2|50w&KFtBUSojd0!%w}$8=Gfi*?b|EZ zp`~SzUnh28x{X5LSI}ws#UZ^Dgcp;(5M<$iqig0EE95S9?#W5~MZvb7)L{>ALRo-c zkyH+6ahZ)x7rp)J)k)~mp9L8hMCq|IAq(8@b)JWXA$-Bh-=7dBM2S$Z-n-kd_t!jDy&!O20SPlTGiL-zRjW9F*Kc1A}9Bc5BX^Bk;&?0|Q0C%qzSB9YO)O zZRJO3nWwk!&wap%%E=J|QkL*3xws)j*@GgMUCiRQOpxSfE6gd{ntsB~xr?heGlj!#@Q-*U*i zv{Wc3C#L|pb$_W_ct{5Lhy7(tYBXe|q=!*p_8o)Hst&fa)IG2xpAS4(_@LCVpCBZO z4kZLpW+of-8;$@UVAd*lt68`C1;)Pcpxtr&$dM!P*7AV?D&Lz}*R8C=)S9~b`e#_I zfTdSL4w{sbvcA}GX=w?I=7x5EW2E2(m|8taNGN7#IN^eRo(8JfQ9oN~;~N;KK$K5> z2o0@hXuyZQ-r68{*)`}dAuQh^EhD3OV5Yn;iUhL8a@9H&EZn0omvhw3&F$4an#Krz zaL}>FZR|6QOicLO+uPWv5S3a>E32E(2f698ZZ^0nDoozr1n28L7ekpfTs{0Ls$y~6W_)3A{o)QWdV9?+C`@7{G< zNS?w*T;_&PphJ6iM#HX+{OI9BaE}7>Xc#jpaNt%Wep4+WDLLW>+gxoP(MX=UQcM}& z8OykSFhsXwQIM6z4#AAm_W$_tB1~HmXlrZ3K&U9qV0U*&ceAhW>5GES@839BSyezy zwStbt*RNlrp|;)N;28Z0USvR88grU4+WY6@+qPO-< zg9K=34Ssb>t9N-89+qRpU>@GlTVrWHTG`S<1pJ2_Y8X2QM{BAYn~%A+u_DB!pG z{ekeiB2no$98N-Bz7}NfcbNTz3KnBIRNAjP>n9rQQW9gHEUBY&38s`^LuiizsKQQF zzyQdOPA%)@zRjR`>dLArr_l}Inb>ZcU2yey-L^U5)k|Qh=YxZTiFM20sXTmm1?n%( zj`go8u%_WJnx9mJT;t~=kp#$nP0Jy>+bSq7)*mi^ z%Ih$H5junfz~@+->+A6XE(t#S^yyLVf=X^yRwV%55~xZMS=sbnYdEqvIheiseh8eY zZ-9-eFA@?HMU<4XYj$x}-@l)M5d-A%;1~FV2{?~`8a|>_U^RMNMn)z@P#5q5YWypI ze=%$8tPV+}3ZA~eeHz+r&(qUcz-YU#s7TXp+K-GsxafA+9eV-};}@8LgHm=|E~7bf z=Qs)izaJV~G+0RH0*?bB=rI>M$0sJPsP1;vbab3Mb?Q{Vm4d?AjlroRq-oyqMWwJl z1IJii?6B|&7!V2tbFAMQ8ekIe$noRHQG*%EX;Q&fhp<$dK-pspGk9JCTR2# z6e8yUZ&034(&`%;pantjAw2xZTlXd?^5_!7iHV6DJUk%iso}F1Pr#`Ht(ZADXKDsk zC{}VB_@48?RXjXAaskI1(C_yJV-&G`AXc#|9Rwu_xnFW}zCsse5xP3v>hia@3zB3X zJ)(gJ^aB_8N%4E-4Z;yzBw1F9`EIH68XlYzxJ3x{N6aMF!LMJBf)h2lhs6Sa*~oAN z(FPiZhK-E~`ixv%RW&uo;rB#+xMgz<;dU@S@t}6M7$M}4Z?>SCiTM|M=17d!mGjMd|g<4)<{$dDv>_X5#ROP=TGYEFSa}22X51b~%v$M0&si_Us z@VmESAum88Rsmyp7Zl{Rx#{S8Nia;B0|3VujjjTS_3`mRA_!QE6~anx!A2q3u)Mqs 
z(eexk8Ok!BFfTW^dUd+}9TX9u#(JOj4PZ$@ve*JnmoaZ4F;Qg65)gRam@ZgF_T?4cA zFiaJ*Pr~G*?o~RvV?aN?`d#1|URYqesi1xTz9+DLm_-$KT$vzn*qzVoMb`Ob7zM=u z)>)Y@psb_>Ax~9sejae98njTsN=Iks$j_h8EX(#t0g8BCH*Z)5UBBE{u(+`Sb4$BV z0b4-^US?qM$uG%G1DHAmE<<#5G+z``Dm)jAw;cx00C5BIsXjk_dI-|{%%w{RB*QNr zLad13u$K_W`@1Xh0?w;QsqV+Cg&m)#EOFnXgcJSg&$Qrx`rg^;B|%#WOn7N`%LerH z6DuG&_Z1ZG0R2YkLHWndnWv|veFuKDxwCVXgM$QYCtlmBGnwu%%Hoz2g#e6`v z?1E7t0;8;`VHhy@z->;6h#G!}`O88=mw@-J6&WH1$jPJdQzAK>R}!`X%%Y z^?-5`r%IxW#eop|!supPLIMrcMj#1ZU@>iYy_AA&m5`FE1XZE{+Zx{Q3YwKiHAQE% z$m{~r9s+my2z>77D)KlGIS*k-56cM-g%tE=-~V_AF6L>t1XBJ%#k`A%@Po&+(Vx=p z6l6sUdhW?*J^8XgzfN3iYtZO)>Ew#G{RW*x^-?!FW zbB@t_AAO9~oW2bQpTuWG;rnn(Q(2}pSfTQM*kMi>gADFqleU3s{n(y`MZR->uYN^1 z-QSSjE~YS6Y@(gl2}17-9M3)_F`!iB5Y>-`9O>Ab+_ z5pah{DkykUsR1RVkBK3d;;(w&ySIGo#}7@iWBi%6Z7YOrL1u{m7wIs{N05p@9MG?L zaWvx9E67ZB;M4MfqISvFs~HbJ*&$$Z*HilB$z?nc*aYuQ00`^fxZw>^=#q?#CNwB5 zaKj%_rpH|nI!fVSYVOXm8oNh0$I~+(C^0yGCdT68;t^oMl1Go;f&E%% z%o!#1_1;2dg@sos^4LoBTX2C8%DwRUz|R|a4js}42I>Avb!L``D!o%UTYw=v@Z!@h zcSPaQ$QWXpI^3#~r%&Iv`%x`d^5u)aTr`v}ZcGSx{pL*pVtpN|^Yv-2c@23Vfu!Ui zY(~Ggl&tR$3YwAK+HS zBoaYEpPs%x9te5-F!gOX-@H=voYg)gOIj`AOY_0%x2ygP)nYJk>RF#sAZsDMD zK}G25==dNkBI51uzk|ayCWf;H(m@Rj9;$9~}037Pk2!%?QG!YAe9dH?d(n5^89UE($(DTjW*xwVjD0ddM*OU2FY;FQ&(4unN&21w{O-++b zm_$9; z5D|;^C!d);O#V$xO3>6^<`t^`?Q0A2AF?-7+kg5?xXWN;)S{>;Xm?t=g~^g$QQ|m|^GIySwj0E(WPz z2jnyqYwmI#J}oA*SvI>f?!A9o9`aQk@6SiCYDJ~#&w~fg{VB}LyBV5b69T=^`Ae7X zKpFGYnjCJ^!C6H^++CzaYVhhjVC3GGC6#+Y`956Z0%!<3=kDRL3`I<$lM4z8l5ah$ zpfD8_CH9|}u=?%TQX?=I^iKW-N9V)Ce0+QlK3*F%ZOdH-u1^3TWq5K#9i>inAhU0u z-)6RH+tL6* zIz~mYld?g7zcY3vB^%j@8p0UHVUCV^i`ejkP_de(0E#GG39c5~zn|vMrz=q*=H%pb z1Y;&`r)Bd{aPb5R3JcquKnNLNO!@j4K#){EK@th&0fNDw+Wk#IQUO6%NJvN!=m(ff zFIT^OvHrm}O-mudI6&7ia1$sT@@SwsVTW;VT&O0_z_F)O8?^(_DcL3=3Y?Y`&R@Xo zHHb(`mh$pj!Jkkea7LQly=&L4M-03=>!B3q6>CF|IWH@lhprl`bh~C?=j{y*4dj7g zQgiU*Pv}@^2QIUm{b`obEB$w9aCiNv;f3y1tuX0l*VYnMD&h4SREd3k89;a_N3L99 
z09S$H^PX@A;5tSmr=*j-R`U?jT%Dbr_1f9wZc-aY>%ys-k#wGY|9?(M%nNa5*eX-IPbXJd-#$>!zv(@SM%Me#_6)Cdp@!Us05m*2Oc^#GwE>O&)|?@9@zSMO zq&6zSAg6N(6!W{iFo9UUEtrO?Zrn>XJ@ z9YHC>nhsFH$lUxCsJ&y-rV8-QQ@XnQJmn-MSEHR`8Fn5e8%6ggZf>_Bmj7|^;2N|f ztcm8>56cM+JV)<=FIqJoV>HhWA=Olqwk?{+LG;*T5y$CgJa4&mY9V~_j3!cLoqh5ZCW0fCn$ zCWcuKPkenJ5xN6Lj7Ls!1SCBwBSVMq@eRNL9wV~bA>M69oH=vm%n`Vk{AD9(oQ~n( z0tjFBeVv}p2tIQHWTFE3@_j!C)B$5%48yLrqiQNQ79R zE0c=Ic_)DrU@v}zRsbpres~L8wjF&DUq=TZZcYp}M4iSWqOdgrIr$|Tl}K1XsqHS> zhSdNX)8C=GR0n{B(@vrI3t2pW>+i8uefYO+ zF4Iov+O=!LrH5LwKYm<>+WLE}&SN0qpycGOC^VkN-t|0fV89OeR)LZln=^T|eSOEzxYI6T~TTrxm7 z#XfY3rU=E1n2y!nwldH3pEA% z`xd-JMw>icP9mDXmfQ9i-8dKI<@b(Fp_Ac^q-2L|O5Vqh#^DZBWS_x3t|SCY2s-qV z&6ki3Jc72GnzS`UB~3{YP<%I;HDn=wcvk^)JnGlRZCK)h z($bIp_3>6Wh^*M4m=XtAxt$VYF{FaSFf6+Z_F{5si6V`!GQuHw`vb0vCF-l;sr+F; z`_IXsAnR7irflGH%h8Tih#P-wrrEWbhKhpLs2ndYbxYmiqN4TNwmn4bAIp^n3xpKC zi<^5RDF_3eK@ix0e~TV@-eaZ(_w_$K*ieW5u`L`b&hOswsz8EZWLM2}MqOu+*w_($ zoX_&0zvaZ~(?P+(OG(FP)p&Van4qrW+fk8^abzE&r)U`ETvD9`>+<`szY{K;(FRUa zi_qU*0Plc+$0a3_Dpnp>)N52{)In9g@yfUYp~JP z(Il^w?&(R7E+w2yV)%OxGI4k(Ihq?8wVu03W!Pf1{a?9mH^wKDvK#t? 
ziT;3Yo$;O5IfN`MEXXOw8AOz6y%Dajsp*6V5`<@y{;(_zm2Ar^p>m<9+aM{ruNHu#V&Fw`^Gs+6tRaZQl$Y7b5uk z=H{KK(zz%e`n`7ZQryv$Ee}r+P1%leL{*iEXz1@C!yB2H+ztrX$&n7C3>_l}#$jWm zP;^->!$~~}fOJ(~ziDOZF0&tBKhgpLwF2)^98t2tpFit1wx^;&S|mW!?k}RvP}Cp> zz+UalB(@U+d5)T!^TSMa8|Bu&rYn)Y65e7vlYNW+)`E9*Rx+S(M zp_iy#>WKyM>~Esd<()iQtI>Y(5Q8n6(ycbZv7U2_dIjqLjc*1b^uB(07!x1=zAjcB z*Rhs@Qt}5l#T~(cc0*LbD!fOppk?D>fLqwq)`I38#R-C&BB%J!-n}cK%@ht%LOA%g~ko$f5xIyTMLA#4uq4u4s8Kd^QR9sJ^DyAEeI7vIkFwE z*M`^U1Fmm-dgsBnfyla`PR5%I;Rdg$s(MeaYWqn;Q%^@n4Qxt2!UX;3IosqR6@0_Tsp*>kZe&0-y@aQMXbX0|5z2Gkl* z!)OB#+_)(l;ELqlox+xmu-U%C_ITjz+!47EVhj&?rsynt1+mHK=FJb-xw|ivp$q1T zpPw`IDOi?P!RpjjREMT@a%reDp)k;fJeP~0$&lbb^w)z2*{CND!+lF-IX%~mktT!$ z5y2RU3Ex3|{P^)^D?a5*z=bGEtw$+fD;mquB<%!-%dac65IhPT76Lc%3_2at(bKy| z;*9x#&xuqZZc^mKVgH5;M)GTgTdVIcC5YiD@3*4r53q~>GzkzR#QD7^P`UQZgiMI3 z7Po<>QA^)IN4E?xjDoIq>SW>VaUs0FqWTjDF)H`b+f}ee)Ya8NbX;oO3(Gy)B-r}izTF3O8>61T76lIC2#QHaIEmu1XOwD|Jtr5} zQ_CHdX{qJ|G*DjOgoV9=?I8UyEI$}pizQZrIzK!%M*J}`Xy-7P*tuiJ4ud~YH7(m{ z^^b~PCGXEI6?b`;!PEG?zkU-QWGhDi-$bkz_-1umLCj97k^l#Fl&D6J4cA$UKrOn3zC81AaZR%yB%th|frR&d3fHe2awJt7k!SwL= zo=X%*R3%RMp^%ID0Yzs+@hqQ8$;r_J0z5hm7@x&ONF0tbe3 zlugGhQJ$`9l37#*g2my1prT+8@A--YOhJB7NyW302xi;6SLazOFxfVM9OSiZD0CG_ zKtMIT63xHk)SRyltMl;pW`0`&rjV=ZTQ1PEI#fwGP>lj%7cHn!{gvbHp{ zzAYff2;mHTEpU!68cgLC6>Gf45sfE1+@&jk6<033eJXOrQ?xEn=J;hXXTZH6Hcs>884r_A{|8^FO(g}xV1sNgLH z{l)(o+4cP-DV6H^$IKj3NH*Gq`C3b$W}Pav0B0dW#gQXNB6YK-x)|U*TnpjtqTJAv zJU$pNnORu)E%A756f$qtB^<30fUJF2~wj4aGW}V5bFK%aT=>A zN8l#_x=*$o`4a_CV@N}&7^}fFjzE6Lk8wWhGh}<5sWvBk;VwQtI>OH&_rfwnMS*?{ z#JQX}ozPA}EX^Rjd5G{a5Mc*ridwkPD1H~g{VPIQQ_bh!tM-8R?`~EEXYk5$I})fN zYrrT(A>|uF$)Kq077*A5t)VW%hJ+}Fg%!lHfARKheS{eayN^DeKYw1Q#@cqJ-O)KR z_jo-M6GyCI2p3A8c(xBtyi%;v($s7N2XexJ`*6)TsMcwC-=e`aCHzwdbW3A6)-ZD{ zG9?_RH~1mC6exfM#43fP3b|gjBE_v=87sdpY zpwVAXrFt72c>MZx72g^kyLD+B8m>cN1G)+@$AdCF&&r|_kqcP!5}FF))^28IqTIm&qT~am2xpZX6x20{R)OGC79c`P zA^x%?Yn6t?dHnW`LL0%K~;J0{{6WH1uHQvLuAMn#~D{l3s)`b zGAA&}z!)S%#Ro%ijDEZ+-%`PJOts)Ev`G>t;juVy@Lc;LyRh6hlN`&Jo3i;B$EYu9e0aP$g02#M?? 
z9%0H&J{Q^-eHw1cPH zYLy4Lz-2m;3wT$7*?|ZJyjBDprK=*1sX33>2iTzyOLjgO+DLyr&xic{Ad)z{A=7QZ zG>BaMoD}b!xAV~e=pKjoXVn8+jaAmr5=Rsc$Y3pDhdD-PF6c7ER{(&u9Zv}=UCCMW z##>``mc!}SkxSSeC_L1QTU@fCEquR=0y7hzt(I7#aSk#?r`_&ULBcy=mX0U_J2psXwplN5%r4PT;UNokgP z2hSp@Tox1*WMeP}CulG6L<(6z8LRm8DF-DF04GYV{kA%xq*7Y!v(&s-ijEsMW@ z5}r>IV2O%`nra090PcmbMbK(^30J{WPC?n)mB0o$xt(J6jDX9;Dgy%t@fg8CM{OxY z)dZauB$hZ{;l%w&PzqEmm7NfYa^W!sDqamq46ttW>rhJ;2bZxNJvkHCfY1pkCY`y2 zA`%2wgn@QNY%cp zJkS+Et_^uHS#QtBB5+u&^vjC4&ay3njtz5z^axARiQ&J4P|kM-T@3+j#aP zMerHC`@kd=KCTT<6>`@d)B)8&iqfY!p%$ZmedVfEIpAe&ot?|D3E?v?Y*K}6MsggS zo*TAqE%;%j4nVdRex6Sl)OYvp-B#?=6HskPN#n)1G1cd0dPosRQ2<9&bHuJ|N9Syt zh@r+ddSD-9gls%4E*NLA^be6O1N;+=&CE{1>bZoPIwTJz!}Y}A0D3~a!HfbXOJQPi zEt#s7!Iz-BPz~+`c>4@S9kA7i0`;}M9q9Q17>PQ@2JGeFxQ{@OijQS^4vIqk%^3{P zF*j<~H8gw$A2?UD9GQg46C_kJP;>__H3!q}Vo>c_y_J)K;DX|{mZ_ZUZ@l^RLoiBD z3-#uW9JW}52yk<#0?x5XXAR^rY2e)fdGOy&E0^{IIw;kldg}!|jyjwG+rfjHh(n~x zN2P|GaB=Ky8GPV%7{)aj;HucBSF z1O_r8ka~Tvk01Nf-o1Ni}PqpPrHUzK5UA?P{tnsY3C8*$hHe)Ngp99k%ZTuO;U z3o^r%7&R)6h}SlEx$>~Vw^7W2?7WLCEtC!@r#{2LI}afh)S^)_P5NM^oO<)^xQ~LO zVw|I+qh2vDH}^G2vgt9<4P&5S2GFydb%13(4wSIQ>$NC8&l@=f&R_`4JP;6Iu!Tyf z50i{DSAE{e8+Pb9%LQ1)1Mo;OkjXy@6E}4>fKiGiI?=RrbP9G9kM=*S25z;olEVnI z%@D2o=(cTB=;qd2;)q$(oD2*Mu+e_}GU1$I_Z<22xwm(r6Glkz+&YpC%Rfxb{Wyfg z&fAZi=gfuXNTEDrIqWCz6AC!a5fGqV6z5%K%(6CZJXYW!j-_EhYOJcR)`X#11Kl$H z2=Uq|Dd-G_{lkNF@xQjRcIJZA6Acjo`Y7@t5(vgRV4QDZ^$o5#EDEZ&cVXKkF@s3^ zWZY4xaKZgh5c`Yya*B%Ep)y*9o3!K1`1_ZJVD1kMa?2Xb0s-cLk>ygC2|NN+n>M-Y ziN%rp4{i$o>qjS;sW1CxI?T^;*A6M89@`|c8x=XY24H$K9zj^Wiz+aR(7QH1jKP$Q zbjy+3o~NrL2hg>L(p`CR&@R#7%s1iywI~2zkI=@YU`kXmgALXyJ+}5d3MrGAho=Uw z?V2@ffDoY~pHbp@@zW;Q?Z_XkA{OCcH`~ho&6MqZrodGUpeZ$rv}RN&ew~+JYRru2y)Ss0)BSAURuglJbOwu zTz2j(=Z+y+BgMS`Sj*2&ytLqC%^3%7%+!qc>%W0pOl1e!>JL;6t@_c1WC0oW=3tZe zgw1hmuC3}sBCeH+m0z8=wYG-DUv$U-kT0i@q=QG~h|GUNKqi_Gpz+nqm(YCHf=E(e zlgI%IH^S^C#3z1>>R)vH?)tZi1wNer;acUeMZZ;Dy!N^wWh=N0A<{fc-SHf?a@6oI zoIg*&n$5s7ja0d|788z^!;pO&q>6|o7}BwoSLeL{=pRfwVPn>72bDqgE26!0@#5FH 
zSq!|n2I5T^Fd6b@i$S=DF)gwT4dJxp6rk8cu+|KEP;8 zME7HRH(vufhMN0^Zymg_Ayb)rGOD~=@JJqZD^y4JGoa^HhU69CB+}GEEM=IU(aRSe z84V2a5?pcvdvP);Qf6d`fY%X7-jwCQgd@Gt3LeCuH*e19O@X9t<=5W|+WZ$n?AQBD z_D4`cA)_4G&%vMv#pt7uw2Vwbp1IxLu;$$fE{__1zJiELfE@&PW6Vk;@g7!#R(tj$ zZuX^1jz&)0xKUn~H#{g(d%4^V1qe0NhM{DW3LARmQx0PXD4{pypyIGY!l&x$&N-1q zx&3{9{t6uQwi{L|zf7w>!E1g~DDng=KWhX~AYwm}zo2Vi@EF40L$L$slV1rds#~hb zYRl>SUyD|?09WR~pNc9JclB;0V2Gec(4$6TGa-)Z` ziIEU?ka|e@8a|Yo+S=0~26uYjynIP@@ZiDVkPu__Q9PYBMe%|#^`Z4{84dtc-&;7P zMiXsdA)nGL3?=;q>9QLidJ$!cfnjX)%}@K5q8WH$&v&N+0^+$Ig93e(Nka8=9>4_D z2sq^L;Iw|WR#Z?RC@A4E83&1Hi0=ZL&4UI%j9wNh3N8m8O}PPy=AX}Z6(>E#F=Ne$R<5~bsp5a_@?0YAjkO@Au?`SeW= z8z)|>^h*|(OS_hmsj-8;`p3yV|H^TT9zyxPWK#aHvJ!xXoFda-nHPPayzCyK%OcYF z(e>F}aXS~3N{M~f)%AX8C@YG(FWOS}7NM9(nxwxC#LESh&}=L}pc64tqD>5)v1{>3M7qcR zOpmm?elca)&Ew|)^NEiarU9@hFs)R%3r$Vex-g2u34hAf_ET@ zd;q5*VkA1voZvztUegyyT)Z%K*yRg-RS^4p-$f79?|TF?5d=*DKodx{d+<1P6?lTKqCJY# zaWRW+6;=lFUs{9ABs93*u~$(w(W>jPEp0Kk0&%phfd`V#-Fx?5q3}l9Kw&(14ELw0 z-8CJZm87`?)oNsI&r}&c_0^cNn**k@{qpHW^>Z=5=(3yHj6|)5`{F*?UzqK!XA2PP z=kI@2OY0tjq|v6-I7}to3pd?q_pEs?q^lDMwc`mrtjC#tyNS00EQ*i}u_ zC(zIIA~f_q2LfkMFRcP;qksYHTWvHGsTaWzGMspR5En}d8c1E)c+%9%SEEf~1#{wR zCO`nvOw!S@6h~YV-D6#XWC?M^-=mb*+S=-f!^HPfs1qRv&W77?5kXUE`}S=w9|!D^ z#47^>-jfI6u+V~F0JE5qNOkli?h#v&xh9b^0L52Cz&a;%( zfi34%5s2&XQblM-rRP`-o02zRioz{d*L4`$K=>KN;xqad$^utazbu^u%qN~1Z;Oq8 zbEI|5$65@HC_bK=%N7fNb*Gw2{M8*diDTUEMY~wu(KbqMC=Fm_`Rn29LS6oP=YhKI z`LR#0N*27(E$F{`PiifXeeA3CK1UxME4plR_w&%YH0|<>v4T;;%|o@F;k97*{2ZeB^c)WAko5GXjSpgW0(bj5OeZ-MpJuXqmopKf%!Fwo za9$oa>GIY#h~qj`t37fONn#tH-ert>lBo{4?0tNl2roPaQok_{qZK#^8MlMm1j9hp{>>=AFxBZil1n< z>28^yZ8>DD>B}m%i45q;u9Xh)bX=abK3CbL@QlrEp^VW|<-K8KC30dL7^whiHnM={v4 zE444G*s9wti9( zF=Y-DKY0rUy8YalW?ENx2wGecdxT~$gP7})j{p@qz!jvh{^Rdm$D7ffq)|t4}|_V&IL3dgHs`+;~!!ovuxLo=X-tv)221B~f}`>hjw~ z!#uml%oW>NFRIm>LvAG2voz|*pOf)?s%%~#5_YyVP@g^UQRyx1g}FcH{la9-sPvmz zqyzk5d(Evy)jm9xd2wiq+L{^xifFqt4~z+J(6D3=+xJB#y+%%b3$#b3Cx=I+h8>$m z5zw?hHvcswD2$0KT1KxF6SXI16`~s-qDHBq!*pX9`>&VvP15weTl#&stFRj;nMIh` 
zLiZ5pe0IsC>fE8uC&$+oUXm8^fI~7^AzrHoCKtDkXP2Z3v4T99fKJ{c(gW{-d6RKI z3M)FZgLhr76(3BJ#2OsjM6-C&s?^taP&lw&q5kHIbHn$=G-{PCA}jdKdOzbu76(`Z zZB5*Qp|MU)l{*9g7r+H{2*9BnZ7kfFEsJ?yqOjcCnH22{?Z{&=+)R;s36-i7TdWlM zu?lL0pK;zc0Zis*vzz;ij?E-z8GXliqhUG9p@BHiS{SfSPTtFM{Q8)C;0&|FRIzH@ z7xTtWOgJjX;yl^Tx8Htgcd4%@vu;!J$h{N+idx1&g*ayT7sGlNW_#|}1pg=J1Jiij z^s4*s6&)&9T#+3bV6BH1!)K6(qAT$~Nq!D9^)X98pvy~r zznKeEwh&N7a41#t7Q2b`@rLVGKsmGn82x>8t>|Pc?EjpHzTCWo#N@k4T9^xJr2yu6 zvngH2ub*>6;xTM=rLL~)sM$}+Y=11o4LpP# zqm?j#eMJINCFmx!6<(akVpKn9=5;4Q2t#pS!*hX4)#<-yZC%r29g#48pw?(Z;{ zpBK6osbpTSDKJ{Fn+HB;W^wooO)AA%I};lX!f#T9@in|~%R%{-#73=z_)0U7HTjVv zu9!Wo<1eI1EoBm5*uP*zj)?NJ|d%NTD64_la$`Q{n6P6(#XfFzd3T8x_vd_UI!Di<&WIvIH3es70c9|CJy5HeQy|#cWpV9qb zQd1mf9(Lk>=Mn7HPHfpLor+p>OR~H05xj^&;fvd-Mgj&d8BYqiN+T2=A-M)3-^d(p z&T^QCDTEzvu0p7`0jPj!qZ%jfCNf^PTUzADWpd@X)L^{$L;*9rMQ4WJJMR$U7avV) znPn$Wg&#qM%J5MF+=nm7WS-pEgNb#oD!Y^P>$%~b(Ie3wVp2Cm;&Nq+FCO=ITED62 z#u(Ic)VdDImo6UG?6CbiC=)>OKr#_Vz&hsSeh~`YnNe?2(qwAE5_JT*ib^a@DJNfy zS?}I(AhL>+go9?soVhHSV5~0Lz~njF>SlSwTgYX4s$$BhKcn%74FX>u?5;%4M>8J> z)Yx;tJ(ZxFIME__M@pxhL82&-C37r`*y2S++>uz3jlHbfm^iRM-xnv41p}MK;bHg2 zU(-T)ryihf43UET86~e1ZoyaI-~ z8ZwzQYBQL^ii0nOn{Zu-<~hWg?N6r zSh09(G3=SPf?bN+g)NQP0Wg(pM?TiyNcuwXMOQ?8SrKNA)7p__i4!KWVyC6r^0ka! 
zWNYKpF~qH|IfnYcC>~-rUQHy8Uz zbL{lvzoCdWj%Nth{TUO@G8gD66q%CpySSK25VXoVOzxVLM<;C#-@1nNI8979 z{~X;9C8!P-8&Oc+68x|#S~ct3`L+gyZ8{DLPcpdhw>U^9=Brh@w5Q!Lio`UYa$(b; z$HtgT-qmnD`pX-2VU#AUT5qi|$=jx_|Jk^P9I7c`41WTaB=Q!f*Ev=zgiZ1in*PB$dubeZ7o%Rp_G6M$p&Y;`vJMEh-wAn6C5&&FX8w8B(HHG1I!z--Q>ne zA66BL0wmia2O8_uMrEwpKVwaB1zy18N8p~>K3@;~vUyPX1A~eeIj0;3K@jXRCo+aY zuv-|+{5VX%K4k4ejxIsImeu52SkP>-D}5A8%$*S!cvpqPloV0<_tcL;d{5O~Mfh=y zw4vS7+JU${fm#-f&DSR8mixeFk7iC45?GD&>l&>V*%xea0p$V+jCjk z%P;NVYjWa}OZQeV~K3^bt+`R7U{h4p4#frq{MqHT-fsz7{b~I3n zq>EbMM99t2M*T7G*A@dF6C@fPu{gw--zj0OBl6{Ez|p-3%PJxG9|JHGi_toD*FEBd z-~q{opv=~%y02yBI^p3XDmDWN!gzs^6{31Pwh+sdu%i$v)~298-55Vsf|_a|o^Hqd zY}UNo;fWPulSlE8(}c%Lc&xDFO+H^Z-HB%y`8k<90|-vFlQIg3sA%d@FaQN3=pzO1 zkYe+jM`-S7y|b6Vk((xZaC3__NW=vZ7vSH86R& z!#0^}qz@&Y5U#a?-(D~m;)Nr=vu z*GDNpo2H*m(npTcu2k)M^@zO)`)uJ+z6}jncu?PmS{xGW0Cf8aghxkF0g&QFoHhy9 zDpW-n9cD%d28#lCy9p720M(bXnbU*CBtYW9R0oh3-E=2q&E6QqwUqb?@W`oHX@=p7 zbpj6=s$|a34s}nr%-fU5MuKbFVCr?J>q=(TGTxZDio6_~Cx})%FCg*`Z|#7>TPt>Y zL5A64zv^pb1gg-(nzFDZcF?`1b$-+eZ)5CPB^I9Usm z^_^mV0^Af{y}#o*;-5Ae!>quPNr2Dg@axZ-pR32lGDRq&j~W9%?FRHaXqzV&h7E?r zipGSF&a-_BLD2DBNM6Iu{~cs)f`D)E7 zn7Oa?Sq1EA{ca?ViC3aoY&DapFbJ74*^&}2Bjq_a2@(hS`Vlq#$RMTjIipp_Qvn7A0Bp(^;(IXbOEUG?=^_FDa8XZ?@`g zvhIuQ!A~qGxk{i7DPR9$!>bP5W)S%z)}P2i{t*Bc;?b=PPM;o3 zBh%*G@UQnUXv}ns*^pUVZ0Xu)OVjm1lz2M^p5g@tO5l={I>3Q6UJo@&qKXH_5>3~t z-HK?ZmYGzX`uT__DXRpxH%m4Kfe}Yn?07b0+btx}`zDCyti=%c0m1p>sFg56tm^^A zCs!;npfFoby7DK-iq2aD+mJvoR+}}KkQ?8Chd|!qC=)e;Ag%f6VB(nsz3VCkUu{~Z zRRA>SSn);CMz&~K^~U8oiTs7;uK-|eEu=Txq4;Aoj?Uye%$$Z5${Q-hLBa_YwEYtr zjBCaP7SddH-sK)jGDs|R5`y~oGr|+(fTEcvdLlXyilh-0cr1ronFH@H`^6sjQC?pJ z@{h?+JSa*s5MAiXAHs#tM>kq?`AP$rlGye!@0spN)<0$LCrri+Zxbyv!<<`Emi zUQ~Vm6_b?N^-dUUln3uBlu$M_jRQfTiAFI{Nq%rPFp_<88(`JW0+BfERfiC&$tpkCBgd59KTd@tfS>lr;H?b=Mvxq z(3DVS6MRuHKtT^A`seJ*KG=XE*yB(_Z7NZP;qXsMHc9OHX*)!g5r~x-xbfzjWZCXS zz+I%dz3Bn4A33SLBkUY+p126f30n0{ms$mZF{{qGsO#4OZA*bs{2(rNVX24+$|jtY zztiRb!TM_vznJ&+3(TAZ8=r!D7LSK5bF3B#_wvd~5`Jg~(5qg+7M&hf#`7TwZ{g5~ 
zyhP&NRe>2SI~Ocr(VUyiO+VqmG6MVx0C!d4;*~%^oG6FI38MzL;~-I53OXcFzq|#_ z^x8z^Re|KnaIk2i$pN=$w$k>nM==RQ48*a@DS38|u7bU907AqZX4;vE+(h_(VfRoE z9!>xbheOZK7FIu_V+7(8Zd$(KG-GVulW8Q9Gss9aUHl6QtXhzQ>5?DI7WIZ3PSG7> zt@@p#e$c2&2Jb zGCS<8Y_dvA}kN1}Ve<4(Em z^e0r^8_!L}eu{{mBDx^;SH+u&ObwJ&yv||rJ-i1Z5Nt{~plteKW$2=+u%~%3hLZ4O zg8$ZuPQIw*i{OQ>(vNK7lySK>50Tyv#D$am4uMoN-$TVBW0u}T8MX>*O&$-CG8goH zh<)Y8`8(v=1c{geh&x7fCuGF^{j1VRYq%%iq|iE%?-l!P2FZ2<>69k54#>OQm{@gX zAfkat^B8GBiY4F(o~XgnH;e;^p(@4&ExiD;0a2Z04o`%N&FYe6!QCDr_XXh=RI9cF zm4>MAB7QimDM)QR#$P{>JfiJqdU9=wiMKlbAmCx>+aL-DzMBzoB5y@XD+9?D%a z_r+$e2H?@c2g)6tJ}QBXpV*-KTnF`RavQ?=TnvB5~ zZ&X4A5QdOJG+{)4cPQ%#xhW(#68Z?d7ZNi+Nl(xM;Vb-g0aXjJBqkG*oE&LGHpMXH zZj>#{@Y&BH7w~BNo5|i=$E5NNMcYZ#^|=m<9hVLk~HcLX(f# z;E|GifvqG&k|KczsbOC>~*>eBM9A8eYyR@MMIW%ASE9n zn&z&aC!#ijAIOk25w*#elNTjB^<9ou5y81oCtQF;5xVF7Q4s`mp!6-3h|nUVWjYT8 zT!bN2afpFh0DLK}w`pY=wj}CR6QNIroOHqxi6r=PfGD3R>;wSQbzuWU#Y`Bxku@Mx zX(!0ZO-#C4vE%0Xov4LL3~~6|0%u^UaAAQkV+jC7C=QIiXQ&FqtB}a2)`J+J%oN?x zEjDk}_Co>@gCLhm#7;(hL_MC83Bp2zhiQxMIT`?7u%#bH$J@Y6>sU zUHC!z+}Dy`K9UCvaKj`fPSqr3dVm|fh8HkAg8eCTsGo^MNPs#T2Tf`evD(w#(G8DGlt6g9h}r^LnoJXF=SmIj-GSrZRoWc;1t(*Vj^?Q|BRkj?4%bI z#nvZpbS6O=lp^s$p4j(_GA)tl7vqMI%*Ys32L71G(e4#VtP2TY`DG2fB0+9@W zz=jAhmGPwZRSKD)EYB1mWrV=ler-}Zh_9Bt(usX91>JpYz|bKr>})$jE2#()hDR!b zq|!=Khr^VogF=WZN-ASOoPvldb*+3bh;RZD7oogrK94s zy^{`tu9|XlgjB+yz7s*U9P*qyx(YiQTjtEsXBW7gkH1898ysTn{VnN&Y@Q0uP2e8N z+aL^YZ@i9od6TV_k!Ok9m1D4pWfiVu)5sJ}VqR6wPTBq8ejo2cEibU;oM#t)rh0^4(nZ_!e z%$^d;Jwuej^fbV}-u7}=g1tj=dlObHZ-{o0b4WXfdbdKGYCh1+vv5tVeJ>Sdp!_cU zlYMU&<=5*X_ipTCqy(ZUN8~e5AU4J<@T@ABT8M;%3o4S4Q%Vzr)zhKxULCS_AhW!9Mk+(lT6ba&zl)xUMBditZ zcp?myp0Fjf<2?r(?E(`lC4rR?DHP-j2|I+6Bnk*p4mqV}TI<)9XBO62FCRt}F=@~Z z?L<9SA>;c!OT_iw$7a$RZ!tnlIRXa)eClJAn8E3s1CfQ1mXg-x8vh;{-8Ht)P8)hc+#maz2 zM!#;nx8dGzFTf*?F4Ttb;@&_9Ve~@!0{n;(tsJcmqqu4us1* zMZ5sH2Z}ui#0z>XxF5w^X=KsbA@;gI2#=O|jC%4vxW5E>W{E0ShkBtHX}SbP`&kkobrnTY*^i-p!iV2ZFHz)xQKo>)~;DgI@#YIn`pr7P$iO6JWJK&%ps~ zryE;T`hLAAQceBMO^T9=N&xKqq0q|d9%G`X7&NM4JTnup(NIvTZK%JwX^_xN><(~( 
z$gU*ACjv>u71pPe@0Di{RYb;Q5(3@Oj)%g2pl`NZmYycMOl)XHdL>lt+OT`fZ!f4< zyY|6@uwUl5WnYUdKL=R7zC`>Gg2;@64gO85frbq zv56CV#6OC&Kzd^6Xe`6Pg#f@jO%!JZpcJ?hUqun5J|7e|JMo>xM2g0@c7k8EtW3(g zhw(8J3w{D7yPFF(CSc+eXpN@WTQc90kjF?_J!|^QP#oSWo*R(8pk#i=m<&=EawGPg zGSpgP#fO5p?ID+I5mO7P^MVxIoz~h=1(7gC^Jj^sO5fT&!~>S`0zx z1l_|UMff=*iW_u4f1_HAV15c#cmQw9ixDoB&rGDt=5F@1{9^TuG85=V7hZqD z4CU+|RD@7sBLx(>lgbpzE35tz~1)@S3w8UMUc_(o!+b|>S+<__SE1f9L9T}~8JW~f`~#+d z?;#*u;RB>dp$)dWO~6D08^IK0TNCCeD==JqT3p{%KM#Fc8VN7gtJSG&n43;d#`3PL;#j&42jqHTqxVrGJ^M{{2I&;ogl))E1VK^6#Zv zSciWv2w8@I+6;VQsT9X+CKb>Aaq47mM)%VEJ3uWSoPV7jUY&wccK$NE(uHceh2QgS za3FDhKL7t+teO+EvDnGnt=wWYQzdf$RM2}jKDRwCk?;9MIX!ESNFKYr<1fY4v4@m= z@3P%<%-O>1+qu%?vXe%9!jq2zXFj&kfytLe5n#ZHlkj*+et>Bu$0E zl++2==Mk}dl{&!&KS~T$qQmwj1@;V1m5E()4X~KEUx2#jS5o}~nJ?Ba;at4qBfFNW zE&j>z*vjn1KT(|4|DsV8nq8D%5NPwUe^7_J|Hz^LCw$hz0kPu z!hZ{gRP$6>N9MF!Y^+A{b2L7B{v-l7V>eQT36Wx3AFQ$baWOl5%o!e`}od##Etxs8>b9n3g1)wa&JmbNxR z9K5BcN1#o8N&GX)>WX8F&*NFNqR7mIwY6K7w4wctmS!b7aEijw&eE-Dn}6uD(JOMP zn&XzZWxV8Co(<_FneE)FV_&UTDw6}5n`eErg8S;VEMNAosaQVuj`HpaT{ii}^O4RFxde6048d1&g!q2Fp?(jhx$w&Z|l;lDk~>``4GZjznL{OrJ_HROMOP zVtt+4!JcPnw7hZ0KnT_2dw;)Px~GX*V&LZ;{qgRlzP=MQb8YL$+Es`9wyZe3g?wtt ziiN%QY)D>?A|0%F!(f@nVXyxBDkZv^=y{(5Dov|%Lj5Wm0#`gEYqyJSVeh@L9iWM$ zrDu`0y=JO8oaa)w!-7$XE?rM%OeL4%zQK`7@yx=0bR12X7R~tbrL;OLT#hx(oKJ3< z!@%{z%8*i)rxWIuONBs*9ded_l*!x?km<(kUPGq&rOyMtv`al2v z-7i_e(fjD%ufac=)lgd61l-gCtQinr-y;7yaIlm9>;y(PEdBv z$9uBhu>WFXIQnIXf8Hi%Sz(cfzb~zqXhZs0;9>I50Oy>_2LtI9jDP&k9;^q0QtK$^ z-I|+&qob=XEnW(*wp1vqPS|BPy&tXWjwf*AZ>4Qt{oc9b{zEsdZ@>0nb4S%?%F+l; z=aj1Z2VeYREdLiohX3Ka~IZhmyVdi}{sGotxG)j>nOO4&E%> z^DE(c{pD*QVLl)+H_G82UncmoR@w6E=;u%trWv(3L)FGSYAKS{E^W6TInrvn5jA8M2iGrYzHATBjbv43>Q-*gzir2jmJ-V8KPlAj zDu#_hSr@|T>sLt@U)%58DP)?xz_Zk0)V=T{J=2%zFGj~lbLm;G*DyPbvKDrgM*3`S z{YGGj#Zf%}b1exB(nmxh&fs?5Ueis3nxxzQk-Kx(!JD^|GIW{FPCO@z+HQ?cyE zjsC(Cobi&H%ZAlu_80O5JhkmtFSMK3Gi0kh)|};T_3h;ywg-VzB^?ddm9H#SJWVnw zt1ej_&UnURn%0L0lznEJhxW{mFcpf;UK&mB=A$0bFi6>BRbT8@C3V(*e!_hXgCfs5 
zP8+fZG-!3m-}O>>ti5dbL~5#Ax0u+}SG>MwGScwe^8?;9W{rt^hAdCVncpy}*F^6Y z>ksSe+SFPr$bN`eLtloLc$Rb$U=jQ&X2}9h)^d}*F)!?$nb|=Xea0Yz51mUL3DM_%PP00EQMTA) zXmytRu-8nNAxqMin)J1+jF^FT9yvSTv|d84TW!B$#_ZUq!}Bv8vqRFrmd&$+KuLHw z73WQtjW_|@*y)YttYeuTPjHY`)50!PL=MK^bo37KZxoZ(`t+(J$w7z!wZ;=nk!S-` z8T081Yh{{Gwi~13@%)9tOC&rbkjBd}1L3o3l8wa}ZENiRXPEv~S`{^=xK%iM8#)?t&smt4srJ^rLNnju{Me1`cGGuu`ZQ zgG6lHTH?Po5`<`m8q`2u*COcEsb&OiZQKqDu*l# zu6I+40IMWFt#hbz&SaXFkFziw!ytn~HkGGJR+^*1tRg?BWPAt66=c8T<*(^kMNtIi zJWTWceWA8kV^bF@4q}!wO{VVQQF?hT?{CmoRhwPYT1p=0>r<5F63n=~9J1pb97b=g zW4@Be>uP4Uhr{wH&y32=3m4xqPKhioy;nf*4=HI%h4m87Y<1r4peFos)Ant&E5w#1 zUpr~?k*?I{=i0S9XQ%6zEYvz*v$8qf+K630J<#lL`#q?1db-&-Kc}5u`Loi%ba$wgR69CaB*!AB;teC#NJiEo_~#T+ zpj!P5U|B2V<;#w$K!bZNR}2I!XVS;q%=+9sv(mTSx{^bqMsCq7V7p6~Tu5b)IE5Tu zEf1SPar9w!7|!$8rzTiMqzPGldgRlX(tcfuLRp*(AUyw-BXLdxi-^DP*b}Z|v1Z-g zN0il@qgA90l3%7YOr-X$ZLU=};wcR2wpZrb16!%~I^n~+#~aWnSEqK=FjR5k7(iVlii}85z01SySJ79TAs}9F65ai#w~0J z3gv&CJ1rkH%KCky0Q*4NwneJLe!4~6>Vq?78IxdCZI z!H&ry4|@M$g`It)#qM&EDQ~OP$uCnjd5O)xl5(y1p5Xhq?IeAI<;oXJ7fBNS<^R7C z*&E>t&&BBO@eCX3;b&wm&u4~~1O{g6 z@wGI?cVEaCi_yKLVqLV)ySBReWg$foJ5VR~RM69d@82t^hHF+;oX`BxdS|L+d|;~Q z{W_-6CIJr#d%OC$2nNQIsx9m#;b`5kg=(=@l2mpnn$s?t#U=yoOo67|rNw*nJ~kLb0(w5kO-j28AprMk0ljseQ#>*nLtFcUXapo+0Lv$43@C!E>ns(R?mEAs+}2k zU5@-$w$$6bHd4vEHbVZXX8RUnilhZXDc9D>+IZu6{{N@F?~ZCR-TDnO>NpA<#~BrA zI-oczqM#J1GZqFE;)sBB9I4VqLoq-I44-m5ii{w=sfZ|5X`zKM7K)V610*3RCG-G+ zgpiQj{RU_5_s6%^z5m>EzI*O{*UDP(Chzk;Wj}lGXP4h5b7iE)5)DEZUqXRAuUjj> zGmp+pg}CyQOH~I9-Jc#5`*`F@l7}Od^6g%ZiXm3w_E$}RjAjp+r!D@`mWCmBToew^ z)vz0u<-*fWPuG^n-nzGMc|)gV#=5B$W1D3R0-7*2akVa! 
zT?O`48f1t?`;fJ%Y+XGW9C>YTtkg^^Dsrg% z$b(v*hfzyDEVk8Q8^|jNT9ef21TFCGis7;&U#}Jcnn{>vOCmxe1;xFKySVfu zQYEWSzQz}ZFAt1fq-{=EBnqL zf7E(Tb}d=Q;w3%ojgiST4wY})*8F?SxA`^yv9O%jHsz;AK9@7>VQJnFVx0r zg#oGhoLe_F4`y2djhg0MYQ7B_oLkYSzQ+eO$nrYI+py&4=2ae^2W=Rk{=%7|P)u{P zDl>0wtA&Hi7lUAg`v>8gs|qqW;A78WM!O#<@IF0IkWCw|LT75L`|p--J&)GizoyV1 z_kLZS=un(>kkEhVMkHdXQ@z-hJS*gZD)*5F@p5#vXYk--OQj?sz09{MXo}OK-;_?y z0R6+0!Kl%eyI!u+%mkp_%>*Ca84M#}>Lr#U>+!7X4>Y|z^q%-6lU!)=4o)(_(W-o0 z_9jVf8?}U~+Yh-Q5V~Zi=1-h15`0`;=6&qW?0Yg9@wKhbrf+;*X}PPM`{rfi+{f{u zA*c4>vE=8yF?fy3iVI7wbL^cQIFk@)=m6=n(IeTY!EgY zB$(|C3N)DFqf=nAAwCyUx%qsm{_yA!gxd1nE>G(jh+}-dVWx$00z&>nzm8;-6()dG zQ+k5gIk~p{q}=wD?&jAKZ$PQ)tUi=rqA1flJ)o(@dBKuVO|kU#yVKz~D|r4TE_O=~ zGH7Gnyo3+R6y?xE3HN7Y>JyZSU%LF`l(Qbn;`fFx8mkZR#KF|YdY2EEsCOuW_UnCN z=Pfy@E$c3gi$7Vd*I(wxrE4bD#rj zLEFW5>+NL;8?K1eSJYth$)ASL{7rmc?XhpE_>2+oCcDVo;@8=)hp%j@uP-=0uug0! zlx7A;qIFNapTw+X5Yno#TnuT`BqzYQ=1b&vou-#5f4(pJ!zfxp=K#{@%5`IShv`W1 z57k;*%0O2ZH-h_gPHet#*M)k$$=g!UVmU?FNyS+ESG1s$kE9z%{l3uWA9` z!8y5@ZA(k8vk-}F`O4}_U=U`ZF|Z$PTDxU z^5b}faFZuoC(dN;?Ujr1QB-VSwNtA(cdo0{NWihmx9z4Rb%W#%UhX7#um(wv;^@F% zXQna~hreua&Mmm`F~kM8$HezBMl2Sr52*^e!^>a4_Sr4g0I$WI%k#?Y$)I(|pTyy8 zt3`E6J6?WOzup`74_^&WZO`Oh>E>#0IAx03^x>HBf`>r1t8z!KMf$c`dD&)qxt*4b zR8HdJPg6}<*$Zsl$wtz*8epQOfozB zt~FngwdGQFXH&%*Ro`Dp(E?qbdM>WWY=1s~wIrlHeM#ZH7CC;>Oe2WK67Tj>N`Q1d z2+Kmy$lTI~1-WtKxf2gO_{@hP23%<=H(X)jlMn+s-w04S3f+OMY&)scu7sLiGdH`e z$4C?P0ooaa2t}ovxi|SbHrpca0{fF~$e6sGwKi^=eh+!$GsaH`F=&mAi-__pM!EQ&({4RPkYaRhJN~Txm{4qeOHU} zsx9aDBV<-EH+|ke@Q_^T1;T{1g+ve>%Y>g6-*Vh?j?=k`e!8l47j|9V{qtY1iJm$)VPYWU^M zDJV$f@?GkNi<#FPj_5fQBfCw-G&m>j;hHL1(!f9=hSYpqWQL5oew#84)!4$=j{E~M z_Mh$67MsAzek8|b-k;fR8h3w2(X=78F(0>C)JnI^cPs0>H5#wJtP*<8r1Vt{FC9*e zdgoGE3z)4tN6+g8A28h+94thSJ?QMTh!qb&b1IDLwEy&*0URzr zA<8>~F1b7T@1L!o&?Qx#aOIWe048z(_?LLfk`K@%ck4L))%w+~p2N4mxF){3-(9YV zNW>!>^UoX4?2TicC1^uGN67v)rhR!JuDASC0w${Uz&RXki7rt#)#xa#Da}UuWxTy& zR>X?CqP+PL^eMR_NX0f$Wq`9aBSUO``cQJRxywREq61v);$hUE=u9{l()Cl^$P{}oAQUWafRK=$dmQ=DU}nSEMkwn 
z3{>r1bUqXcU?Iy7moA4wwn-*#esw%B*da-!O1ar~I!?|Iyhg;JwTRYxjOB`#V5*t!Kd8^gwLI&e^Td z#>&LH57l}8ToOw75{~c3Bn_@_M3~n##8Gbi{S@TOv{)B3+3!5^lj1GoFOY>!c0*X@ zLBI~FIbxH6vFL-nFf*XM0TiUxy(5H(iVeGw2n5il$07H(`MlHoZ?(&J(XG?t?dFPM z7m+rS_bHZd3^FoP}UNvJ-b54a}|JUkf{)~uRc3v#+myz68h^N0p^7F&WIWa z!%2o3Ht^$7B;DBw!yDtVx2m_|3$5d$6~9J){k-Z9o*>bPP$aY(6XWLUssiBL2<=;FWlK+Q4wEV=W%sgt5Tj)z>SK)a^GU+>HMn4X!d~(ixbV2(Rh-ogE;~8WFON z_)+2Sq|HChBT4PS#1REvE2JcQ@i}*4JSfyg#0&QL;>ZXqQUqpFUtUXoaeGoGJ3XD` z@2qh`1({XUH_s6JK14D6Tujl_(1;5mW~u|WKwpt9`vuEcL}QNIY7Zr61q7?oh2rDV z#SJ!C(6%lDhEO}CYYQ9zuL;WtWu$o!SKecX$|(o#?SyKlZ4)&CUI1Q|p@;-nMeGDO z4C=8l03U_ag>;+SR=WIPDB_WsBTAxMLTcL3BUQt>8@xa2jWzt8*moTrP0?tNXyick z*bO#53iTBR@@vPQ7vl|V7`Lt&a-)<+1Hmi~nJMGSN~;%#f`q*0{wrOSq$RE@W?TjQ zCng9zJwCfmPchnzbONN~SGl(&3X0p(*sJ+PW>B`kaPgxI6CfMXtYi$6x5)&DT!4pA zgT~0*{&v52nn#>2CPxR)T>t}`QJRsFa~(6YmIHVwfX7W+jqx8%!7vY$+aYdFFrcGQ zb{CK*F9igS<-n|w7zA}d+H(E)2EeieWH3{$YS$fC67}TX6ap&h`0}(bA2tT+;tM2{ z1BEL435_=VIe&j{mheR=Usz_>R$8~1P(UN<5g?5gQ&7Mk<$+)lMHWq3d-9e<-?8udA|AcP=&e;zBiXayEs?xIM>OV#K53$ zkY^6oR}g&YB2QBjCdv~eiOXQ#0ozoSfx6<-c;tIy*~Fj_R76FoI5MhULdA-;EERY8BmX7QEg5`);?9cz#_G&T(eIyO2rX&fnp+mLlEyHxFF+ znbakTx@^bNB2EkSSKi7T*4cv0_Qh(v5bL_XSmIHOviRFJ8Hsd0z?Kb|%ELJ=*XK#6|>Ln;j410siSM)H%XyFX~nk{Fl z%%*f%%HAdf_|{jQud1)EvZzLE{Ba`^buHNIV(||-CL2&!^0Y`n>}`0=t(?XyzYwWQ zMWLu8-vd{WU7i{B=$>ux!;WNYA2NVg(2bt7S2yk`oH&A`a?PO#+k?SRFYfCHxTc=; zv!3ikp%PPH;?q(U{0ENjGP-f&y0`aX1L=AS5Z9GW27=YMlUNKn?AUaaS?mUsu|%KU zjlyk4uD2@uU$1R&W|jCQ>0?GUCtruher3K1^=hfNr*HJ#W^`+0lyqwWw)7A*Wdmx( zm4+Idz(`+Yy22wC?v9fpXxtiVOHEXdIN1QY>XV3p3!!)s9^ZXvY33bsn z_&!nD(QhKoCYNXjPt-wpX1&H}UZ;&4DU$Ez)KCR?KA=o4S~>`l;wiZSr!n*Rsx6k6AR5DHdso{w$m<@cDH|N~ zPTq!c8MvG_n0{Pve_?t)?IvK^&={J<_=flgLQ&HP4?(1e=AQus4 zQ-9d8$tnay$X7Scku`5F&)MWwJ1vuNg;OEDx*mLt#tX*d6Mp?Ghm)mI!)aGPmE9ef zS}oLYp+&&U=W~!A9m6(^F4b0foU1+gv@08!Uqs&Ju@7`Gb5k=@pgMc;(qO26ZE&XH zr^zzM>{*)-M^uE{Kmpo4hfyD{%ZpWIcJs2VL*ygy5VrH`5%JV)Z$Qm!d#78X+v{t% z$6EV>sBR78!@NeSI5}9PrRhyrj;q6Rhh*jgSo_^STv9T5_V zPdz}xDD>|?(}$*MHibM?DGkxcxJ 
z>rt0Q_hYDOGTRE)Ndm^$4dniIMAp9yqU$ zY{VF5#A8pg4S$qb4AxR z6sq3l`PIO;2v5AYJzop3w3Sgu64$LCz$X9cet>`3jQ&@`X9N&55P9AHq$ zwbC^MDzo_de_NdXeKkQ*JR$CR2H6!rH7L|PzFz{-_Tl>XO!WA#Xyjj~fcF$Hq3V+? z?d{W4kd)@b`eM}>bwp3NfMMeW@(RN4FYD)}P67g?N;9DA*?cq=uZ_!qCfq8hUHp)s zD4AHDXGapE7(XFJGa%&|!ES{TLRKO2;@@65(9_rF4vF(fdi+!`oAhKc3*tM(-+?~q z9J>D3|5q*7=i%QetLeWNvHxi*{l6Hm3d+B)D&cf;y4D@LAcDSqg_z%v z>wkYQzBWe9^;CH`+{d~<+$@^^j~$pvw?3`)%*>?x;;JG_f9r^Qv3{sVRy-M^y#%j9H3-<@>(F%|xzL zbDnom##Z&ZvN>-eXC~f?;t(2XLQ$ml5ru=(#U3mix23wxKKEL#8iw+?W>-x^FU29r zfIE=^fH3NvJgCQ~J56JEJ;ybka3o-Z`gwYQ2HE#rv53Z2AJrprE_4T`<(l&!R85ZC z14I=7G6^>;q|XM?_M0g1ol}hP;dEUigJot_si=Nq5oV^-1aP#90V*A{Q}jgai9ywC z(>6#{Ix}!3eW~8zbB_lH2YY&Yo|$m{CM&(7i*YYuCY!vs=OoZiYvs^U035dDLNnD3+D1#FmLXvGRf)>94Gc5l7{6^RO2sC z*Suq9vGbihR_3?!-ujZr_sFr$lernG012wTv}y^6^tjczq#7zrKnnjovfLwGf5@*#V5-7rjI-@ z`nYvh4YzLJBv6N)Prp`fM)0WLW^-F?VAd_SP?9DfK?4_B+WB-u``@PpJS)B z213$i7boVo2Nti5zq!{P6yH|zL%!C)FZq6eH*z7)b>bDj8;H-RBhP%$uLMBP7sji} z?u4Px9OPlo5ozAST~#*#JPV|R^2TgRMvm6FttHCku!Nh(8dWnNb(2@z=+ly{J&%sk zL*1dnB^1N8#c_N8UZR%k#G34}Upsy=Vmgb)-^d5l3vIzlzH21}91$geLD;qVR&kh>{B%5{~uSPx)x6)=3IIfscoBMs=L+^n!vDSLw5 zQ#O#KcTv^c9f#mHtI4I=wvzEZ-mcV4sw*^WKe1d~xi~dbHX^OdnNTj}44QZ6xwQnW za@O2R3)jy>V@JVgc#rMh0Mm!em|9vTjYa?fiuq@nZjitKK#nBhrt1$Hu z@?jG)i}{%L5AU|9BP&|POQgoB;8V2J5VBPc|JBne6dQK5Zdf26-g6uZTAMXIkV`^% zioTZMN$v7ri|mQ{2MBtR=}nppy2&cZIg#KLs*8@^MyCver7QFO#4%|P?7~{^(6bXh{d?1bS2@4z+7rg; z@S$ST0$3G+dHMMm+Hn)NUMp+cPs`w=ma=EouW`=@1_rLqlzZ5f`=mrGhTnSVEu2^x zDQTF>_gGu*J5MOAc^*nA>;@iXd-e9WB}_%)B6FAXg*Y0HYcNnS@_e$C>-4gy$Req* z;sROGU~%}gOm2AKT5F1dDtJ>`YOz_a&`!&#>9Cy<(>4?`i~<0=8f~#-{=KtibkA(5 z?(9$kQuPTbd(vs5_)s-7>`=1@;d>$MC0$|VwM&kU8L-UF7J5F;+H7-eMqIYE(Gdmh z+25z8rsOd_+f($hW-#_PmFA5kp_O0dT<}&Lsl2>gcj1}kVty$;1)#H!aQF++dVFTR zBqu*~sKo7FnNR0;yBq>c$J^7AIiE%&mclw(cfW@^YoUX>1S zXG-{rNO1zK*^Ye_#;=wZrxqG@XO}b=ft8IXd8k+JxRsM&Z8xL7b)D-PZ`mmq0fo0p znkeCc?Y6wpZgk14v|jFcZsw#2U2|m@g1i&>@q{TouLT&KWIsa56oi^q{($U0m2WpNsiJbEbYCmisbO@ia zll;rV5G!bDR%ZIQ)>rn~`@F_-CdxyFD;;kng^MwX=q$rfx6%h`?(e^~Y5iVRqyo^B 
z8Botv2dZ*3-DYMbZH|_1SGUkR>5&;|5q|U27Hq(~rVCh6)Hc8=u3cI@yww4L??xyR z_t8TPlY~MsT$guoKfMY1D11BUt=)#O9FXz%ck`~SD$LV;sHKe?9xi8{gjxcAyL=W0 z3V&BcXONXD8zz002ecNml0|b{5|E*4GLp@;9qfZ?v%xf|DHmk#ve|!epnXh4%*WH&xYgM#MS6{TKp`OHkM&ZVoIkOt z1aGc7F^Mxfm~$vg#GmKtU8Trsj;CBm)z=N>%q6hZD-YTS4e6Vf^d@F#hQB#K;MJ%Q zr=m{MIG$D}68OGf*O?xx+@nj>>TtK!sUs>2oStm~aQsqn-XTz9Rx92TLuV6BqGOv7 z>6x0E>VRE7$(uG&T08G;k-l<$ZpP>sq7sRQ^mKMO@&obG*-%0CBURO2)Z1b?`M^h9BRqi`_~#QqRu1btF~_e-QZq zBzN<6Bb^gT4go*Vj{`(z8=Xr_ljJg37-lSXd;p5u>_^H@9<5s{0c=0RXr-X0A1T)v zej}G{DE50dsLu&Nb%5$L_M?KWM?U~^D;rc?*ZJR{47f2f;=+Xd;0_5sv*?Fl58BEC zf<}3?aw0BZ;Cjo(poPAkp3$Cea+81f+Q4ik#o3=&yVTWU8n;EA_()3Y3TdwY=UAiH z^@SCmSMQATDHLF}$4?Gnq7n58kPr_wJGp}I-cIbY^=>hd0C<-ETmm+KhahM+U22YiY}j5>1ut2nK>SGI`a z8N6+Ga*KM6j_G2lrA2#0WUw2B&TaZ5S}}8uUQuM9UtN8QKnpm%iJ`61W>084o8*v4 zL%G7B`>*-9dnX#(w_eIWI~>1et$K`&&b>0y;9a`UcLL!`)7rMtIs7NS%9x2FU9!qA;;Gj}mcX?yQKv{R9?D0i(l}ASedG>2zG+^qD z@fmzSpk$Z3WYT1{x5ffe5H64^zZ#{?Zqc+r!^uE}C*bpE+Ws67)&wWU~WJG+KcIA?tQdU1k4|q}N?B-f@mp6f>Vv6u)gb+cfvp!3f z<~$*QU(W7Q;`HXfd}*2%1O;1%MQ!Kge6QTyo+!)E8-3+NDfK|Kz68ZFXALlX`H;Gc zu%ofuT+nM06g?ic8*(YOP!|r-A;g{zxsYAK5XVS@q5jXXG1S3#BFVk=x87lJVG+nsOwU@JDf-dn GxBmg|n>}Fw literal 0 HcmV?d00001 diff --git a/docs/src/further_topics/dask_best_practices/images/loop_third_party_kapture_results.png b/docs/src/further_topics/dask_best_practices/images/loop_third_party_kapture_results.png new file mode 100644 index 0000000000000000000000000000000000000000..8f388bb89caa781f5457e90d250bad4bdfcf917d GIT binary patch literal 84484 zcmbrm2RN7Q|37>gWwcaeB%)=6A|-{)3T0#yMKVfuWH&^Krlc}UR+40um02o6C?t}s z?2+B`I=jF3_jW(e?|(ec^Zz@#kGiGL=lWdNdA{GT^*%3Y9^AWxnTwgCs3rUNDQQy_ zgC9lF+b}M~cf@G8mg6^iXNCPbjQH|mJQj%mXL8tQ=uA<|j*|aq66E4-@Xehr$_6fn z?2o&+9d)v#?2fuPoV0g2X=A?jw55}?js2;$;-ccB+cvMYc5!i#78CpD2Sn|iti<}1 zvQsE(Ewx`sUdR3Z&o7=PRlk1A%{3j`zc=q%u23SQ^6^wBgV?R{D}JQg-cboL3#jHc zmk8jtI%e=LrsY`f>aFD*nOc>--z_(uRCRKfR93mAQ}va1)+0YJ*R)!ALqOA*)0xBd zy_4>{d9O~lPkLmEvj1wB9b#3|VwG3=*H>yDSKS(F`MOLk?%Nz&ZEex1h3 
z%4)aQ#Gqxl;@=l!ztzIR!X(?_?EWLoC1%G#|4#OEoCqJ=l+_=%Y_R*51+)~l? zR&7Q8)?f2wZ0nL1u(OA;U$&YW?dFg;zWCj{cMmPDU%$>mFW+uyZq7fZm8iq9>7XHTk`wpLid!Fg=0N+=MxicTm!U@{_6j7K0ba^{=~OMDSbh!M2a3A zxHnk5Os=&#$C-SXsj=RG-iA~)@8RL$Ee@@Fyk>^V#aQ2t^){rPXb@ezMp~!O9d{~5 zJMp9G;g_fVQ&UqvcGhahDXXX~^qK5n=a%!~{`~nf+hwcz$A-2eE?pmJsHZ1Ci*s;t zwta|EbZ~Th_1G}If_vTCwbb6LJf;4N*j*x{=0Bk>*I(Kz3;Fo?+<(vhs(x%JA;8M) z_3Mk9^@nI0FE1}m9UXcKk8Te>$7+Mx-VBGJ3t`mbDH zx7WPuL(CRQN&lNSSNh=9gbWWIJh=3US%GM2Y7lOu#gBL44sLD@jdNH7#pUG;yk3Je zTsux(;gmSuTK!bN2`|HGZQl-Rn-so-(*gup<*9AX#4i<9M(wUnjPZe=S)Py zOiN2k1A~K0pB^oyZs~OL$a--|SXGw19LNu3Rn-8|DB&`O+6M2 z2Cv@n)brOV>?D@z>gtKUtgjBk-}kRyzn-GzCOdgY`@gI%n41!=jM&AXudhGaQ>WIK z^Smk3?!0Q8!pM*JZ|_^Zy)Dx9F>&RMn>UZ1I1%L7UZ(rTW8=a5jK;>sCNEE?)Rgz* zZ$sAXx_F}Di9DJ9;|ka&(EAHSh9M1VM`0oK*4WWDmo#7 zF;+nJjS;nF+ctXY{!V9=c#hkmhnW{GTJ+qeb|Dsd%@EVdjYrzcf)fnWH$Kvc3Q5hM zVrE#zQ&M=AE<;Zv%l<2mqobqr`vd=4z#BjKeqNc0S3p_N6kvUtl@$;awCL^*`;aSF z7U%wI@$CBYGRx?9YQY>=vR+D2;-UBt5B4sj_C!d#s-9~Q+ckq9(MZx>v+MNcnVA_) z1A~co&Zd&Jj}8>)dHy`-i`i1L>3;zBwSotjUG>cYk_KS?RK{bBkZS^thi*!k~*Ktw{PhvZ1qJCRDu<7=~^#O+Zh-d(ojy$&aQVm--ulA{g`kB7>RXWBJAb?w#r*^;;V(xpor;>Q@L#`_k9s_J$-cXV{@ zkJ@YKGdrellIOvucyY1Tt~pW+hCAMF*|B5qmlv*B^(#46uf`h>e>GjXQGHK&$eI{U zfi2EI)EBSbz8E`@QtiJ~Sh(k1lpHU!KkZkaX%Xy@p%%}9qhIsAxh!PP$jx8E%kvJH zhC3N(vC>eA0ZU8CT6XGCm6DQ*e)^O}#5hyQ(J}2{yG`<_NJP8qR@WXKdIp9E8d0*8 zAK6K$xPe!$l!Tf27$!O!8W>pb*7#-9*4EZl{eXdM`^kjUGZAhrucCbCE;VJ@E1H?{ zVq+R)JMcYDNfGA0st{8cDdUM9%2|br&CJY1dtbKf3ir+xxQw#b*VnamcQc{vb~mGZ zMElH6u|?kdcV!x@SnsDsHUK_%*XQ(2)LL2D{JZNsGJpO&`TfU_*rUZ4L-X@o4jv47 zUgsnzBaIH|g6T5C!i!$I4V3+OfA8(jLoe2h^wcHCq^4fQK1<5z?Ce}8 zA+b_Ce6e`=&+s9WTz9Rcr0v_b-MUe*=Z*8jy}?`yqK~N!sfCFq$`njg$^E{7b|+E3 zX?}n5SeZ8}5*-z9Q~T)C3s>Q}sa~=BQjWAMHyzYbJGY9JbuHc?JUm=r^Jb+}r=E=F zORI;AFIc*CX|~Voi^|TH9A^gAxD{byW(zfMWa|ip)|_rIH8nMOc8qB$uk4lQ&$oHa zjx}Vv5Ao;b<;86k7Z;D#5xO28eraaLOCVGgzalJ#hKI=p#3oC~@Ww{pA|X*6!0VGx zefnDo6SuSrqkgJs>3Cn$P@TTn(RX*YK7IaNr`K(2)CSN0$mDQ`ZMjC|?n@s&Xrg>9 zOiWBPNI$-${>kB>q4x3YlhI1_m-N=Gw-%aN$BOyJftxC0L)1vs13ytUpk=EyX`S 
zr@xiSiizQijg8%6Rmsh|YE`UZ`f-D-Q)_5xX`hdq<#|XhT*7tX{(bJv#+i#v{_1~KRh8O2@-JYPCrGcQ_Grt_ot&pmo!Y0W`c3bJ{q>9!4d&D1eG?sFW=Xql zF^`OloKH>`M%6X_67!#)ay?|iUi;5Y6YCF6rXKZ3n3pg%=Xh0~_oadP(q>y~t!^0!hwc{k39xBMY|KP*F_gF&Xtz9&ab%dfmpE`2H@7%d_ z;!VN9!EbqW*)9hL?!jUV?;}X(!QQJCC)w#^go@^`etv$Bqa6pSUDDEemfT|acL!Vp z_RPOvC7lhnI&PFw>RoJB#&dq3K7A63*tBUAgHsN7cXzk0^s?o7U(C(TqcsIIO-xMw zdz(t)x{Fs~q$p+tK1< zlJ24dcelMl0~WTD9BrI#J->{Fjm`huIa;6~`8e&oQ4Z{VhEP@irNC2NGozy}xE{0( zt8rr#E8M!HzyDaF+6(uwl zVavzQas_`WbaAZdBNz|SoKRc`{2{qIKdo|bHl0wEqTDX zShD{DY^HM38~mJ|ovZQ59~-18oS44MCLFDu_^_caS^wy%Q%lXv%o3}x;+J!AU7j52 zY%lU(6nOdaxrm4_K`G~27?qWk1Fv2U2nb*xaKz4TiLh{T7*D`j#%pgowrtsw@N6fL zC|2xQ)K=Zk+@s&<<@Y!^Y`HCFHuuizeC@psg45jkxmSsr1g2Y6nPV{qhJ=(&47JC} z?mGQbbK28w{HNi^1TDoW&rOW99>4m7@7&=qdEvtJ)oWzQ@v6vRY}N>K!OfdXz*s({ z7-xx3zq>1`CbG)T%#>;IVkHBEI|rTr*>Kgp`{U^yUY_oIdhE@LiOESDyB#}r1P~Ay zuMwr9rL|mkZo+~pJkPLFD)|>3AD=509df5_-TAWj(!Be!q3+q@|=N3V=;Gcwws-Gd0QH6Fl7zVcvM^AL zOiX8iy}%e2qDvBz1E!!K)Q*bIvDC>O$}N3)2B0yqG}vZ_cvd{j~9LT zz%r2cE7D~SDC?+&#d)I)+n6&WKM1^97qOa?lS!Usi%Xa0Zuh}GMx8g=p2xMzd3#4b z@nfT>!CRF3(a;^SwIxteY42V->T*!fR#Z)LGd_M~jf{+x@*IEc(vR0B{q(@S?J^@0 zg1!0fhNk}0{M)y$0u>`j-sR-*_c!F@5sC-!h&4gS#KdHq<_nsI8cz)tR5WI1OFcHq zP&L|n?AS3~lkF+$>go<%ANVGwree`oyBafuWq;2&qp~UX9LhR%IHSOgVJWv?M8ujG zF5UbYzT=;*99&)f@&9uJzvq^HdFfUT_~?4s5_pdgzVA+F!o6|JKblvMcZ32D^2^Hd zpwmgepGVtLqlCN{I=H(_&CJfm^%<{Rv0`wrO))Sw5F50pzJ3+fR#{(DRuu_f! z$)Q_lCqI7a*nZJySoI!&JP)d}yot$uozDyu9zDn>HYWqcE#9uDg_FCJIACDw= z&g|9*;pE{F&CANjSU|C|vPN4~MH0y9<+Xdf|4Y!sggelMGFLka9pkcPd(nu={s-ja zL&cr`H4ruhjI$iq3@TR$g0Ka>C*wqWXJ^3U$D6r9pg;w(J%5QVT*mWw`GDN-ne*t? 
zWOIXy$PK>P6h#Sq)`~_=KqcM#WK-r6G_?iPyNF%v_jcHq0ipxURHs?&9sT)vCE6bY_3^Rc%9|Sx zC|Fx}NkqjM)q4*;%VqvkVYm9sxI=?z1tUZ*ES2wqE0xeue2fh|mq_(AlPRb=5ia!|Mhwf5i!SZ~+s%lZ1ML8}| zuRBfT@CzVefFE5gNr=^>bgZw5eT}qBA$V$QUtjQ5*(t_>{Ap2W6x8a~ zs|}2e!#p47ICm<#5=aKUyz0Ki zr>94?L!%%A=)6yek7vj+2>~FaHdL(;O|})2k(tfu6%xKa^8Jo@xVl9MukXzAoSYnk z0-wC+w|6@KC~Rn82Wz9Hq;~FXBhcM*ATI#z5V}usU7fDT=O^*;0c^s0d$7dcykWYx z-L41%MH(tzNogr5ov$k@ZalHDwiYnTJbCHeJN1k^r@rO~qrb)Ozqyg}17f40AWo55 zofvigJe^*Okv|>7(n9PP6%~~&pzpYx=OVv?21iGg92}k+EuNg5G{|+|>NC~7sXK@Q z#Vhok8{z%gl(md*;bMv+Yyl*?7zJ_2_xCSfqjE7+8f_o}S~@y2>ot~V*!mYmudkl# zO)W4-z4?@GwQQA$(FJVXq{HV04?SvavM=z@&zH@#|GE?f&84rAje1y;d-g01ma5r= zbL!$9_AS6I^gk*ik~3sbzEB>Fe=y3b5(#i#Pe$J4bvi4}Sdunt9vW#!60 zL{&p$S(MLIqKX+Fs7a1foc6!gj7I1NzjgflAE2tZySwizMaQQLa57Uq$S4bKzMhCbnz|*R}+Q#Z|-QyuKhGhwOqu$6i-eg$XHv*2evo zopd%ejdTgg$jE3MI0j@L*Jts2qJ0?_e^FVPip3mAN5;bl^4F5Wx;i=+l!I0g01m<; z%_HY?1+bslyT7A4@zADV3*XB#H0`QN!y}QG7^Cy4Y6JJ>v>S(4D z7v^Y2ZD6a_($azubv8aezR}tF?2Yde*^cd`%0nwz^m}%|cR34-AD5(%o8o22Q|1uw zYNnf;o0)@e-n$nBTKMYi+eLu!wT+v0@8$+aRs=iV)`%y3FJe| z>HNxqSJMf6_6TEY&C-CthOI}V#X5v$uno!)Fdn$vg~UWb!ja_9FVwX@sm027Iks$w zJ+EqXtj^c&`TL>Ws3fo7yg3si6Gi{-W4GRR)SP~=;UO(( zQa?6_QbNjfsIWe2Kw)7aH8(d`)qH%?M#Da9`muA=SWbCK$v)!^VL~ZNB9bVC7cO4> z26BVl^O56$cz;mRxHy*ov6sKOByC&2l?G)ycZM`(Y}>y5VpJ6GcKenqVDal9p$h9i z`RKTzYKO^Yxx(7E6a^_-;kp^J~KJ;G(DY;die06b;|A(Tktqa zHJ?#ymA``4afQTO+xo|et|6!;eB$COoN`P&=1wc4)GDTk%Yh_7YP$6!+qHKUkkRk_AOfkMvN+siDy()LS*Y^RYb8rYHiIJs=C4MtI2It zIZ|0Ce%!NvzR}N&t?mpKZqvQ1Yj&-INLAXHabl~}_kA;?wHme-^a~djH8rgU?(t7f zPB!ZBNwmVQ&;mLMUY{>Yzz){z{wAG<$A*Va6yV2R0V<-CdiLKz2{#hHo}Qi_XJsxo zt4w?NFq$%CBd+q1;o)m{?=tJ_H?VWflV?r6rTcOWH8rU?Z6^GR{?0pvD;AfKsGIT+ z2q?y7=;`U%mUlSq1QPWBoNm<)T^jIg^YM3g&YI?Vq)q)2iAzl-=n|?w4cPe8jEqvy zg0|k?Mfm2O@bK-n}Kt927mo32l%;u_wHo^ zouC$W@Du=YK=oAynZRN3Su)Dl{Uc5OTqsHn`Y`x|ZmX@%yY}*{ zxWPeKI!95j2(H2-lboxk+2A`MJNbPjD63lE1jI3xRjV#Sx^Arq6(%gICQfardM~fd zm93T)tPp8hu`apnpb=fvPAoy0cNl1q!A`Kw4tL)nH@vvM@MhZhQK`PZ$3*4Wz7;s50D%UDnp 
zF%zc!saYV>_hr4g2=I69Vg1?H7xi+3ogtJHfSM(~zP>W$0knP+QV^_ zfx^$9IUyMk(?j3PdxKrKDGzQgq&wjkr~58rDZc{9Y2<9(4{5m=Xol=*BDO+x(Ke1h z7{S8IdI1lm2+#-8j&#a$fytUekVT@40x=zZeT@&xy1`=^*k0k+ue?F*V)bVthusHT zX{ZDlMMHg*jd%BUECZq_futMf`q(&YDdG2`hhMA@MrD+p9X$kIK|{CDI%1QQ6bE_* zvC^(zU%uV0QRn(wExsM6nhQZw2s{Rbh3LFLbrYm@=FY09hN)kb@7^so&OG@F#t8*; zPd!R@jYWA#DTEN0%a$d9%OTi*dg&$xiF2X90@?pG0+Y0gIy$VoWn_qn0*Sx^3lik( z0|#*>IJQ}{00z0N7h8N#~Ab|7B|#5!k{H z*i?cig0%^!(cfjT|K))B=Vz8iS-sv`0n2#H-i1q;z4>l$Ir;0WF9flzCtGqk0T+_` z9a@;dzzv}E-7NS=rm|;(3F+5mxBY0*g`TBng$RPz6+fk*pb&axT4)kme>pGjRa7FP ziPx>EG$vMrHDqzufO#&j7*m$t-%f3;KJD)V(9dAQmO3u7{s(rDPqBl3W#K<>PX1)i z2qQT1MnuFCc@~z{tBvDEzJLF|M>K35#B9}0K9A4P6oRm{b=)}S{X_c=5i1ID^9RSq z{DXs+(90J>6R2yXqod&KiXmOU^jVLXZ2JJgldjnzBrGf&v;55D@UgdeafA zQ@a|26$0J{(@xR9QbR)nDiCm|*YN_tFw)}CnT(q7qRUsT(2n6~S$O8m8RAR9ZxFW3 z#REqt)T=PCmj-15FuDC%)}m~&*H9_DfT*a?B`p-o|9(YV&`Ryt!A^94$W_lXQw`E+ zg+f(54#Nid@#6%zzq0^CL$A;)90yxpVfp-1AiGi?9HPy}vnBdFe5>b~_wLL`FIRaP$DbnrfLYTkOEvT>bE(oH^MVL=Cx zj()*{n1qCVDQ4g+L z+uBM+A6wmAlNLD+1`Ag#qJH8#7jQTW2L~gBMk#4&bGXAs7KaObWEL*wAk7)(+P=DH zK>l|mtd^d%vpWhU2haAtjHh1T3~KQ6lb=bIeVUnBKOu7f5B=)ZWyi!hZ5y5}fje_w z&ev3=vL(+mdCd2P_>0j`sE23>3-D&(tL>2aDAg4m#F#q2a0yhirM$en86xR&(?2DM z#s?!bx&DN%v=0asL{0Sr2ihPpU}N4MvDFWOLTiCL-&GsG3}VS(<*otvIi@Ebv*oR# zh%L3vriLEcD6n8Lsoxp1*necfp?StxpcM}e51;$Xfq+^nnbd#y@Zqgka@2;qu|s^1h|I;dEc4KlbQa zK|v;b>t^Gr=Iof;B_Dsl9V~~n!otoT04HfbTf$!wkGA;$v@`9v>Dmw;8D_|tDR0__ zC&n7gNGt+4jAQ_CMSCIYsT4rABb^TA_*16cYG{6os5`sdL$kB=0}gl2!y>Yg;aEKot~ZZ_l}NB*s!(@N!i&3 z&!0bM#lzaG#j3n@>sEZK$X#wv-Vp>;0+&rr;vn{8cV1?%w#P{XLby83a-5V|>E+e; z@4p`MrNGw=f8OodbIau?c8mG@yOJ4mzmr^M-rw7yRt}rmxb1|Q7G%l%Pfrf->h^}O z3QO>SBg!3Dn%<}p8vNmpwY8o<@(`U^x83&RKCJFX)~8O1_P692r+6b&5vve$t)=SD zRz(-&JUGfLD-W7HJN8DRAr}b>EtU&TvvVu{G=)&1s)GH0{#2c=Yq-Dx^#W*gAu3Qx zpYQA#`?qi3vS%e;L2t(WBw`3w^|kPDCde#MQb-H})w6>q84^WJ74flcZIe`@02*I4 zH|G`&ZMUgWn#wsCAx#Kzr3G%`{?G`vw(s9Ba7n8Ac#n;awtoNq&@}}stww4bSb@kV zs4yhF0)h|4_*z6nFxCoHxN9KyJba^Q$cJ5D3*_>r`*+zS_iFzh_&qB}98MC)fY0|N 
zEiO&;dFh${=AA%z2mz=%R&+Q8#tM`&QNdi2MbOkE4HuYjMtu(Q7;d@s<`|4E!CzK- zkteD%PUw@$2lPY4!=mEixXb;UI)h=n6Acyh6M!%&V;QG7J%AOuKBby;TP@kw)e`(w zi}iX`lv&Kkyf9gUc}0|At^CoWeA`cc@{5j+7M})mC6HiARl}N4X}h2Z&FCf<;g)UR*Nd?XhRqR|u zZg>JoYlOe>zkp;#oMA#rK7BHPq|Z#kP9U0E@dwvHp8VQ}qE7-#B&|^$tBe{zZV9!= z!m}68THHcVxDp!1TCC~zyF08u?CF~YSpXZkcKh~4$TBCc9O8^Jw5%PI@NCI|?oKQy z{5w)w0YnIc+_}@R@BYnA8nIXeK#>aa@-&MVFMgN_i8jW$v+`5!83|PL^AI_RT+L?m z6@2Q5wY9;Ib<{sNATYsJXa9mh+;9hI8REj_ME3$}c{UYx1YQ}Gd_aK2sSl;43lK@r z6bj{HyIeK$eZ=#Sw)RIMIkVd}7mV;1;ohvw{yLV1%h|>l|GtP#Fn$rMt`w#~g zmqMKOtLkd4KHa<2Le&*O>!(W~iZ}qSW^}F&L$a+naQV8Y$bZ2vBndL@d+Hk%Nrqq! zLWFcmHQqf|;2XLrm*d^=aGTLSlz(J@l4`ldYH$AfK>!XkQzzf!9eJyurI4K2kE*Dv z_`u%~JG0d-8yXriczT2eL4(E6x8PaZzGLIy2n013WHPxs=zHVr*;f(+_S6JWM_W&i z(UYyTvp#oE=jo$ zu0`QKho(t7JiPQ9jX)mPuU+$lbiUhTl<{qtSc6S4q8%i;05I*?T1fLSF0Kt-9t`VR zeNQ)WOQ3IX8SCobhM@@i8M+j6MAZHJS0J~yqAA{ya^xp5L@}{crwHT76KQBM@ONn; zyeXp#A^X4xK)KEKBjcVov$HT{THlHlCA4~`AS9hVj?oEO2&%uLV$EKnl7l6+WPoTYymGb5v4R@M$!Xhh8gV+xyYhCBfV z8w1rEpz$aQ`9#-)t1g%2A(|)ar>eNPZ0q@$NC&tcEGsH1N_^1e_|p1Vg>vgOr5$v3R1 zmDM9-aQKKufg*}v%nlWV)QGqRyH}?tJ63@B#V~z7?3|pI2F87bVmJuL=IHm=*V%aD zA+q1uVQ;!Y<*!%&>$7*nnSG&Q$$Jl(U-JuJt`^uY38-yopF_0@mF-VfiP2mO*Au|8 z49)vcn`2}%^Zj{?-c)p>Uw80_9A_)*&+lh~k@PV9!gnnM%3$)Y3r_nrG*bIn=KXlr zg8!MvYrLtfyttN8*EA#i=1saBler?@Ffrkl9m)s~Ea%{usOdbM?kCJx04|u(m&@n(@L3+k+fy z$K#8po+L7hOo%4xFE&nsPEr_;BFGLBASgTmC@GPVOXXRv-MKUI&IhIO0`?>nyNpbR zg$w!DuYahpRVQPE+U;oU$6k&G_t zl(n5%n)w1`)ka@HqeBiGBB$_JcT<)aal{~$Ko3VxaE2 z)u2LhO9(}yz3aLO%v<@R3u^^wnt@R_8$P24Y%^+tTn+XV*U()XuLbAos@xn0g)QI@ z2a~HTE`jipO>o@h%a@7Jh}SAc7Kj)`-Zizg)v0E3=yI_{|p^y_p7;40$M~`snB`^k0Xjvc4& z*E@QScAbSatr|6bV}sfUgi?vU{UKVOWSYu$dQXp+LLz>iIS&>r*S+{pjDm;`)YGR= z{SdV#B^eTO^Gmm7pe(k=O0`X!goK!py#yx$VZETfv3Rt*HVvU!WOPs%Zh1aNj|5O$ zD=p1MJZQ9oZBx*_2_r?uXbRO&RR}i%`V^a4%HyjCy(p>=7~t zkf61WDl83RCuz(J7wE~eSf_GD^W}DVd3i!rK*Zi&85?XX22Lga4;kPpSSAv;nw^sR zI(mAmNM>nbdb%CF7X5@o%0Ev1FT)W5FJHc_Ke2$?zkff8LbjwLCL}jANJnDCr1K#{ zQY{s4*OW=T>Vr|ThjSLqD>wX8zjY01Cs4h~+idZgaOl3XmXYKXNf(Fut+?@~JOmTm 
zL2q4u`P8$(z6wwk@Q5d-dQ-38xY&n0JC~6k?d!8@+9fN+w2-yvFl4n&ILHb zW^|=;+B+-0f^tTa9mktK*`DxomJTj5ho&up|BC{fh1WtWISBRqoN^dpP?ZUbRfnl zz^y})t+lO<26}8)Z!f|nFmm_FWkK*mrh&^FnVAL7&Lhm_P1w)fozkv7t>6&SS4p9P zWyJhG>h9g7%!4Q|8RNL+bVv&!ViW+t{U03<4p)MX5I)8_yyc$#bv;lzsaxy)q8?jk zWM*De-*~g-#LLK6uU>sB@Rbua$&Qw+k56UuV?LaES@pmHG8JO`QDD+60?+9F{reBc zfpk_wQyv@{x%nhCJe(8E=Q`+~Tgw>+M1f0Ce+Kb!NeZ3JbF5go62x&)#&lP==KwAo zw%QXFb?Ak!-@Z*Q()}kgxV*aQ@l3en8sr+x0lFEPnYDNEfGC0&wPTt4D zd((jzr5=jglt=Wv|IeIQF8t%;!1$FljX&IZkR54vRY%G3vb`Pjg49k z5#3?r#{qbPAbW%fDxq${m(}`lr}0Vrg9q!#><@Zzf-XZ?&F$RO*JB>bJ3b%ZwQ(a! z0os-mtq(1ROmjen`?Gs(YSzyTzZHYNuBUceF2yCdvHmWU7~Mr~c4(tT zK!%2b42Il$?8lb>vSk~SWE6Z_s`<+o;>Vzt9Ibh{Uy5jK8QY@I4Adna+MDDd>N+`W z38IsH*y$}b(GX+C7x#3&nP;CLOaAum zoq9yp=2IA{A{vWx=RtHsjoz@;C~*#$u{&mE)w^vwb%kRqEi_d^I01KSE>dKuCLX?9K9L1y`Y zjV#KpTwl=(6^VoZ0jJ`US`j=9rXK`On&bsk=V&#eXQ+-0q z;9S8Uy`T`|rw~JF4$o-7sl;yI=Z!hB5M;%y53s)JaQgYRfR~e#6IRLTsDL&d!cTw*%2kK4?K#h*a`mcYIV4{9O{wy2hvYvGGJ*l0R|+uaK)n5ea~y zte~h^7wL_A5rQ!wuqYDm?jKq0<`0`~9 z@{(vUG8BbN*^5q5oqNVP!%-!K`x59<4B8o~Yj7~PeS=Eo*m5%(wUUZTkC*AbeTj{TnxEv80g- zv_3Eg2UdZFJ~-^^)vF&j0qhdz1nrG$o3$Sji!ngpPI`)|dH)CAukfvho@?Ub;$ol* z=x+D<%g9V|+>Iv-X%W#tkGOZWc)H9Ro|$C=ej|n)S5fB6d1!M0Q;Uf_fgEVvh`f-vujnm$r3R$SAxtSZr5v3}`16dtGs!Ml!d2h*I*%C4D z76MVh_6z`5)uLWXBN8!?k@($vIrul@qD9I5TjuQ*Qz29<5*7goB1y-LFSs}oK#M>g z25o8r>1Y}X$&n(sP-z&uCBPEfvb;0OH)L>dkll$ZFi;!bl+O2v4d4`U0N*O%(lO!@jC`SO$vwsP64izg=nA%dWULk=5vbcAo>{6P zWuKhk)?0s|S5hDpehtK^VpRUdjYdtg#`PHjeR3FEAddE3VGwxgT2$1sS0eTze`f)( z@h}vpD7^^+8b}_&m=w9QgaJ2t!8<*hmbMkauTgWTl(BW&iA@9`(rUNMb=Jj;r z)gPJsO*7+qLppx-hR{PX{M#-q&tbB(Q2(^if2)5^(~n2~pO05Gp9;She$PHe^T(+- z@4g%lw9(`L!l$;?*-0WtWJlfDbn$}`XP26tt~cCec(3v}%QlRiF&n6ImF;w{taN?S zdmc3Nn5CsmvhlovWGbZVv8)w}JA@=5tGjHgRw?jbkA@drA+4Rr3M(`5cAQUlDY@AQ zl0N`%dHw#qKU57ej81YfAUXtRm8F4p4WaEHK_Up-g$|fOwQm2xgDMF9K#93_>(&LN z8U^0Y&Hgfb7#}Z}9*pgJ1ok4y;{&7&L3N-g%!4wbX5mVyLOh7j^tPze6N5+50~6Pn z2x~w>Cb3yq3}|f(WROMBcVdtan(G1#w-5y$(^SX^Y8npz!pMONqI%;4U#~)}T?d4T 
z#jJA)M3)m&pZp*nL`{{Q87$WJKBQ+xU5yHY3AEWHo5jY)2F`gvR#sNU$Qk%4m}SQd zSfWfl(RGopEx*cJet~J_N7*Ro1~AyS!-2o-=SQjE*+PRL9K+!6WHMwFg9cmC+5RTK zt1g)xROP98>1FZh8H^YZ;!FfezV++r;jAkl;}kxYtcp)Z8gNO867d6&8PUHDcbf$* z^3HZUF33A1r$7>~@HWGqRl;I`we^8C?x9WUcl^*mTM_G>fF%9YYjRlIHL{K!ryc;! zA)w3xCi>*(zzyAX548m$g{Qd>nRD+ty%;!vOwjv5h86H#OjLK+xa|P272A;_3&b4u zGLjC5aS9eq68X3jf=y6;1w)hJu46`bIXk=l*_OYPOK8JMF&DTXGtl~xK^6VY7g%Wh zP<2m)ci?WZuehXyU;=Li$s?}9cma}b8u+yPKz{;31ehq|2UwQHhBaY>D8Z2-%K8L^ zgHn(#5h#QuVAp)$c%=y3RRkl0BTusjW75Vsc3{P^5HN(sU>_)>hn3)yyuISu*C*VUXvRP*7kX3sr(~n_ErrolBq# zzP+N)2GO>Z%<_8WT37Q|Lvre{0GT33PK0_xL@7lGPOa3*(Q!HGwont2_(~8&ZfJGq?^V^HU)i*-V zh+lz;1qBF0VV_klA(j}3!x?TFU;Wb+O&8zw+NAtPu&C0rl<>oRaWI_USH;DOIq#um zU_3j17&CO|H-jnkxfTWcnDx)J3u>$=IV#Nt=V&zx( zzRb=}I6PGccnNuvtuhNJroy)&#h4vS`h&CzOx=gtHvWxruzx{7La!b+G0~|2RlRoa z-Xhcy^u|J@twAG7aQlgUjJ5!T9?F5cqX5u=Z>jxSdXV$C#txF0!e~2b4Ygkjvc8V zMobN>qe=6O_ujqLFdGeQUuI-T!uF)4gx+d|aTllGeyGLv4YAEROSsUHwtevbUg2Y=J&fy?pwCe0yWp=wRg)_5jkRR;f=@Sostm>>8w)YH< z|Bxe+Sz)^;wQJ|j#9PzT(+{wgvc0Fbqrh!F`ROV1?|ogs?N6UQOZt%i&wWsm)CyI9 z`6c57`xDc=%WcI0$s%F<))Tc}N2nB&k2n(IGFpnRve4+c^o$H;Cns^j4F?9U0w$8= z8(N$qzB%=4AXPVa(}32!>(^t%Er!o*-Li!moCsjC1RPlcEl7x!#LaP#fR^k*HMK*6 zZmzCm)D!3u?LcSlFFTtNY%-Jsn+Vn9JieToplAhQ!cBI9p2T-nFm()BcYs;Uq z(2rNvD-bsVfSQhSa&o!``?^X_ z%>$1xMt}-JlHe^ZEyQ_1TWf1?r^7ez+`VhdCI;^qf%+1_wWl6qwpWm}Y4-+c55ijj zI28_S6>p zmffMle2Nzf5kVnhr@1*lQh{l}HbmwvZ0Gzy_B?^z03bjUtwCb>g*d!N_jZ}6kWdj$ z3vs|$G8sW35CCUSNLB()gcG3`K;}FQ8=P@qfqVv$F0qlEais(pzDlMI3Fm_hQz5s z0St(Q27$I1mfyA(H_UxuBWo?)j`{vVG!w9r4?+QGBu6pK`w3D81kvQvOf4AvibV@S zpeJ1~?QA(+A?!;Xo)d3=zP>Tt%EiU?4L8DHM@Sj|L^V#(A-5T4*;H^q_S%b3(jaDh zz5%gb0FS^+J*ufZk(zfAMT5jFVMXsjfx~oTqGKdUN$&XSbsm%=cf9cgLdxX$q$BqB z%P5j2B4gv2t%fL0#!Eot5Mam?S3%ky;*KA1_)%!g#0eRe%IK4TO90^Z5h&Nr~k>LUza_>#aQmy;^+-Bv)rE8dBd8S;&fa>3036E|t zO75`V?f8uzqk5nSC2<|)2F8*q z0hn=600&AylxGvtOvJ#%0)q?xWNZ$d#2>yRwqt8om%#$gKZ!#v-B!21PTvTPH~YfG 
zl9xo{9%Lk!TnahNX2=J79?e(*(bl!s_YET7QT+C;n$`INu^TA78~rDYY~;qfd{YR*s90UneKWi;<6()6?987`GQ;Pr_ENBR+^Rb{9?wt{CTQFbt*d*uK z@LmN;k{DmVf{9FbLkyGWzm6_Y2){~)cz}dXk&qDZG8ALS&dN1tn#`!ALSs-2`LG7D z!;?>v01zgKbm{{Bg5LkR>7&ivqdbKemdo~<6em$WN(#FXTY{V;LKVV=#u(gU_>LX2 z=<1lp5yHZYeyfwPkP~A+NPFGhY)FfJ4UUXou-k7X85=w%UOtqE01CVxfPG>f^M=N zagRu&r}i+$JZ>UBwXt#X$#p_kmH4&7<>%I8s13)#(NNH~&S9q(;t(L6JoKEQTyUfR ze#M5}IA}$Q89b%PkUyFjMS*3Ia|Kv1)C?aC{$*iMbAPMd94H9f(K~6CUtm9b8I?IC~N2qM=}Tl2a6c zzb_&<2XS=`9(FPE@~&P6IxF!$ZP3lI8yS!tBu8tcSdfuHI3rLu_oCWO}9RwZA$ml9YNU?B^Ktv!`Z~scn zRAMoaGdj@5@U0@ewX2Ji&dnS+*AN0BKa^n-r?tEL5}v6I8###(*If>{hyzV(*((1j z!ni4>?ko5%xO))kft+W7sRAT=GNp(4ldhrfzCscm?kq;-lKWl$b1(NTnmPDq0xV%z zO8JHo<&_hK? z7F2|_Z2a=H2s!O6QHPwCLykuTsB=jqgo>Q-L}YLn7$=*vmm^u#F;`fI?tw8x2&aeU z3geGOVV%kgh4Kpu?l&wzd$!M?+(=R}xCVSI=(@BN;M9{VTH&0fWEr5v4%gaJ6aQ&^ zB=eoQ8{_>jX5}FiBK#8!E5;lqoDiJ^$pM4BAn!6-VR;a1=m2tq7#t&l&RQ9nHQ=ci zQ&L38=*E}Cyju|HP--Q)x=b@qa{e_-R@-?kydSe@@}gl6&{VM!kiv1q3^FQZ7Z$?CODC(IgVy|g_Jt(nZXMeK@hkPI5gt^F1Le*Vw*QFCYOt2 zNXQu)xE7)$bEQ0g25WKKsb&t4jXx&}621Ucih&{-ztp^OF7O(Xr=T>;f-yjZu}5fW zTfqydLRbT#Tk(IcU-|yNODwj?`7u%C&K^!EXjMb*{F*Frmmwgv?`3^_1zzy|kDCvtfScEz=|hjI$=Mg9_c zH9OQ)On@jM-_{%YbCGKY1^IMbxPcwb!yNq$$1drh!UaMjWTw8MOeqBZF8DDA2#9Gy zJ*cjGiO~X`r{{50MQ;$mG2DASfe^#gEvC1v*Qf;m(Z6r6FG2Ez#o{%sAuND!Go*p zbB8YCv&G_Re@jlyUx7lmLV+>3GHBS^$rK`vJj8!8(nwwwlyDtD%){-KXj96t%r3;l z#6TZmhwOi-9yH^obzM^E77Te~x{DQK+d7xhAZ(EHFi{LHAh;(5fi=+!GRt3*<{=%O z^@unsgN7wO8-5Z&?iEtQMXz4Tb6`3W7A{#F8ID*NrN}8MK{dF5d1DZxieW&pOQ`3} z5G3*9O3)!nA&cGWfCa#gfbChFO!@?in}wXvIc@m5rNxxYx1M=i^__&ge<$Jcg$M0SuyOpJ?fH)WQ2wXF;y8ca5#= za%abz$aziRVB|Oid})BP<`=na|NE z#M{K~K0ZGIBIpKb{PK^%$A~|ypfo3E}$;lr` zlzr@x!fhs1@%f2{gLaeYPa>3+|DY<+j!FmxCs;Mqx} ztv}^BZz2L1^5{gxK(VhF23h)skAnS1k*kIEEWKCh6&0nLMTkZBiLV4x zz<-RKsdpY7@Ar6WtEyh6*5Vc>H|vR~_jG@@*ANO{kQjmmg~VdX`O>o|dtYc`8H1OYV;fYI<6v8zU3BiqS5-)3Ww7De>IAIpb|;IG*fuP< z&~yXH6(W#vZ*@Aqd3S%4)xPPesmnMaf{~Bs*G=#d!2*mh0`z~wsM1G)0-RCmh#A8S zUle7oQjEdfCm|~UX?E-oHuR%ah~`~YnZ8YqJ4VPMe0T_HnP_o3srBoXpg<-6OebX( 
z)?=vM-CUIl?dv1o@FZ5j-2+i8(76v}ob-j9>`gp1%Q6=S2Nq$y$C{t(^SwQ{LYxk@ z>&UA_rdxFVic7;@9@@%JBB0cfQYpxW1uOjxCkMs#SXo*!QMOiMi}=U{5-DT4;}|w) zrx5(oOf;_MEX24v8hR%SI55PtkU@FyQ z$q$?RH0r5bm46+lpR_xol9ENhZ&`-mp}(^Lpt|HFCZKzg%77+O1hd!m^J37|gYd&0 z{xA04JS^w6?fX8Bk(t6W&*cmm!jgzIkRhePSca8(CWI7fp;m?{!;(~@K{CZcg-BN! zQc{Fak_sgyqw9@jgai_adPqVF+2CCz&X2zIk@60NIu%o()$g+A|gC%Zj9M-L~ zesDU>Dy%hSuGRcQR@M{qJwmOgeD9{E)u`|Ei<3t#e|kPBW20Fbdwz1BV_-uZz3_yZ z5$TB5+74NIsCaYzPJWNrjcxv1KvVD9TyV$LX=i5kgUHMNWhbc(D_5_!DA(AZpyMu< zS1=C=<>C2=V>f94BIIckglHB4W`#tSduMOvaaZMh@EHO&<3!;8?Ebbu^ zRat?23~Uzk*)aP-SqB^{PX+~fPg@6K0sH$(h|*zcaP$kEP9x4k;B_hSTgeFyWvaYw z5K7b=H zu=Z}SkBbfl!go?ujqx}+Y`opUo2~myb-~mtaUY6K&5jD4PC25y+daS90azncAW32N zr>wXQpYIQK$fzGZornZaF#)2p!fY2y)p>6A&Dtm;0wLhq#mVUbKZKY;5bp)-fpg#ES~h6tiijc3m6u;uIP9Sc5w zUVDtgUC!2$QmdD-@*JBt#UzJFc3Z}Ij-G{j4G00;13ukKx&{eWrM!|uR?_~A%a@3f zw*hJ9)AT0lJ$=b;AzB6jXL)i^OR2JiZo-2^j&!~|T>sXsTi*J+HKkAsUw!%F#oGb` zAeZUhJ#ck_z9Ts{%j4is&Qd$BEl6vEz67UjTK=zt9Y-M6%n2WO3{1f0W6V{xmMypO zc|w;yDp6@}b`>v=;s$O|4aXKOMreyV4a5(G9PG&Znf9!!uCO+m5&NXHpd0{>7wM(4 zKw_~#b;+vgd$}*t^nuQr7oe2DWJEv-*lP%iDCPrBw9xm>isFaV)cq%h`8HML=H&FQ z%C&sz5)(72%49I*0uYWNq#sR<3s!N$2&XLgl=t`~S%+M*zf$d&`A~_LdcwK4(-TN=rS-XG8+E&w^3{`$^+w#KG$_Yd?K|Nn|N=cUcv^Jt8Lh< zoksnud0GFkWb|}(y@6^Pw_2`h&SjZHfaGG?G$WPQ;64&_OHFvnavDGlDB_#dS60uU zN)quYc0bJ|k5hhwSmC1f;YIqbG1fZ%bjb-ld*3#s=L!fSj-eYgr3&TRJ+Ha`Sv(SO z7M39cIqMN~ix`?s=gA;UM2GY~4-)zZP>yTj|7GNjq^s+okJ@ryV@*0ck1$qa79V~F}e(dl}868}XDABW>sLY0)#NqV&bTqfkixuCR!R{bZ_$ zqUyTW7)S<+TiQR|%fUx|C)7M=B|70gE+m6>6>#k6(Z;Vn9n3)bnHUryDKBDxNAzA} zvAoF^n#|-7dl0MTT`;MC)4z>MY3!s)*4J(a=(495kx4_HCNTW!qJwLDQHx3{EVUvO zEhY5toEx{EKAjeH42}`xp_=I9bV}TM0$wc^VI!^WEzX=zWMF3T&zZ^n0B9wZ3Z8f? 
z)+;@T#h@SX6cUt3io!0Lw*l18wcKgyOGPoZM@ta7g)g>n7J2EiaY{g-(6WnLXNEm& zBc~on?Kzm3qk4`O--2=;p)6U&6b#AfM6+o!ij z&=)v*a5w{F!+S~;8$7{zn8h(XWu;P}+ImJbn8#gl&^>UH)~ zy6P-kwgwQ6;~zWlcj>ZoGNt8HICc87bgYD33)RO36@@!V4l@hBQp1jd!4#B$0GMv@ZU8=0e3Fh1YbD8~xWa{7 zNAr&7D?GJt523zb$~jrP0;sabDswo87CyVwR~m;Qdlt+Z&U$fY-^k+0lyDZAP=Q2# zvBOc;;~jB^QJpoHoEB&qc!aadU+60K*v*g(JorX>Wuo5QX#OqI3%%xf?%H*eN7)lb z-E%>~wX0WS(Ssvgl`Vx`70xlpVgaH{l9v3 z+n6lQNJ+$^YI9rv(y5K#=HiCHW#Z%n%T*S5{xYaOP=Q=U(Y4Ex}; z*~avZoqw9!h`-~VgO6QpUq|B~*p2-rbYzOj`PyJE4Z6y(3K;vXls7FcLM*AT-1+&t zMBeoW!2yv2Xv=~FpS5dcbq`sh_;B4&E(=0|v_|6jgYrkP6vRlK3elwyeO}^uF6b<2 z-NDU;FK*Gky_RApm(AYSD?NGtvs6OKC4{853+ImI@-pC>715nb1PpPL^|Bn zUBSG9I6;Xf^lC+vehX;wmWtaPdfpBsdi2iRSkQM4Vo(tF?#yZE1CD4Krz{+7(R$(o z4|ddDlv|BJK42N**plihR`K2~uw8$-D1SRq8D-Zj zkDj_bA`d`RE}KEWgYrIDmX;&4MM=W0)_~)L{Tp>z-`j2MHk&ISpvW--7?q}kmha#} zoB4m5-Mx2D4cVdy07V^Mjk*E*1c_8Ljy<&>t)#2Bw>Q0Kgs4U80WYC>S_nwDKn&3X^}CspnW(w)Ed#^Iex4W=?l{-QR3 z#=VV{DYp;wp28LrbiU+$*iNTfLRAW;!9hKH(V}Vdqot-r0mEnE)3yrjg)d zN1|I2JZ?7fn_;$9SKD4xN4MBqF^NwO`Q=H*!;+}raXe>|8AuIPE!Ug%Xo<$6k5Fbe zk$Dq6>WS4=a4dy~OY1Zf8s+6Z@; zAd$38dvk|;Ek8d{N9Q(=(Mm*;`IB;j4*L0_K5WpmvSsr4Bpf)R)gwoOKW>7e)t|d1 zA^!iM&fmG+hBiN#%6$tP?$)M$xxpF;)uJz7&Yb^R81=a+TTuVy&wpj3ib%`|X-E+z zN35BCg&Jbn(F9#djq4k?p2P$2x5Pev!`AeA{^Es0TBgSds^G`I2HxnypARa0^o-S( zyU}c{zW(*xwAL-m9YSYZ?pJu_@2b7Qss|+#aAU(+Tt9$O<%K8|?Es4(*WBBwEZ{Ik z`n2wF4ipt-uG8*cgw)CFbiKw66RKC;tZr=h^w~3;^{OZ4flk4SpZTcsca90NuHGpd zbPW48%C3!%EHuhjc(G0+rsuBe*r9_kr~Ag9Wr6#~JDc+$jdeT<`_9uAacQOm@rbXG z;`OmlouB9=D{WL_@I|lCbqyaroVozYFx=7?gO-GA;@Or-8(4e~*h58%iGXb$2$eJ? 
zS1Q!iXSdmE;?*(cR8S}cD7&+}|5@C8m^{W4ZhVXPmNQDq+Lgut1_vv|rZzU++BGVqhcU4cv z*irm)7O&!?$6n15Xdbjo)M&Uoc3^*eFmJ{`NfF&W72OaIy3FhsQJiZ!1vHB)WZJ*W z6&@Rg)tb%eC_jcM+r@WD026*plr~7BB-#uT$rJEFQEixC{=$8v)A!@3%&O%Co4*#B z-e27#B@*IN2yWE(0^npR%cJBRlU&LmTDq>%VS9o42XjY8ld(5~m6!h|6} z>9UEhEzIl)C@8TLYhw;vH0(B7v}Kr!c@MmA3F1pE=1z(yk#dOh_Z?~g>^hI{OoK0D z#6V;4IjrGxX>SsXY0#^u84MUOnpc!m6gC5qq`B|TDjuty@b;AHGTTQAb1u2*V+x*j^+_@Yi4aGc(z~_-ya;tM)$7iI z&m_u-GIA9pyztuW%!!+l=q$;Astsxa?sl_DK!?& z#5o$g(-?qM-2x0gC7uVV>w~-iLY?@hegvKz?{jT+jw+cV9)CAR04N%FUqP^yxf=0Lj z>)lz!`7D|+#SKn+@h5Sk0*Dq@mI{m}^{_(H*`6<7y0kHv51YLA)8Fxo1Aj}ACXRRs z>Xt)`bN?2GCbZ%_Wspa%GyB=JmytKk4T!e5bZ;GE{4TDOXQVNHiVR`NI@^%#Ih0aI zMLeOblCAWJhX+Is#+gnCh02ZWr^#H}cn}FBDz^EZ;w6#?sOJ+PX;%2NwNZqloBgSV@I`lUZHu&esilN5_Ib}^Gvb$*ho2-O6SDRLFg(lHz(mihFXRq5TimbhSK9Nw_gN-OoGpHG;_jdeRJY?0jxApredYeUqDk2R8+(1D80Ozon8 zsxx9^$qkynwTEk)yNT3;_C@+xIjvF1Ai3;-S^}`!7?8|uUZbw54*~`w!e2STVY|2e z{nWl5K|wuT;>V9JSbgWrB}dC1>k^zzkYE9Lwvp%z4lQbNK|*sz741*VU(vO0kCq98 zp3#|}UTwX87ah0Fo94$nXxTxpLa#pR@=x7DHv~?PHVMe&-EM4hdb-WwT|P`dkl1MS zqu`w>_MZ*azi`b&p}URE5os-n6kCDCyOUPW5tXJ)+rK?dLZYd#h2WfZYGywQWC&cJ zX_@>tIWS#8-b_FDA21*!b!nvwVBK-qy1?Obs=Pc5L>7w|62Fq$nItyV~10wd<8Olh;twASI8b&ZmCU6 zymN6?h!#LtDVB!hf>HOiD-xx@w{;sMyA_M#|HYHrK9LWKwRhI}|r2DhFedF~sLZeDCT7 zpGSURVA4(6w&pmd*wMw?-lIodQ3NVp5jR1YXa@w=AxZykvFY8M*K{JFP&6@2|jsN+9IWqqLwTh{B(T3UY**z5ssxbQw zkl8Fm%P&)RDtjXL;M}THw4uxLA74MIyARq>G#kQ9MJ#au6u-+P2fCPcg;gc1@Acfn z#DxYJhdi^T*kDhWWD4r40}+ojq`jvD7RQc^PFS^c8P{6{@^TD?K@qxgXzE3gKT3HY z)sbEH_Wls(z|)-%*mEujXe!zl6%`dE8ioa%+^LFQqgaez;O8V;g4pF*$TAvKq1WjN zjf}R8B>?L}N#M_+4kU2Iv;#n+Aa5e2<#T+JRNx^)tixuqx2s6p+Vl@)C-~+$aMJL{ zp(`6#uf>4;b!8uCq0_ily*dNhnKsxe-BDUv>x~(>%G zHRgMMm}-;C+ItS>5X|}yxo_|t4~&#FnYJ$$aTu64abQ%OjQau%5cO5$D@Ffgtuwq9 z5{buamaT!cK&MPc!R>&+&0TZvT^u;s)Q}3u6I9%10+-gb388y9Z;R8!2yLj|bKls| z;q?L*->fye8-m4mlO0Y??k+7Q0kDX&=u-UoHDLH)OBjxZ$_37@G@?bGhxy*q|l zWj4q9F2mxK`(9}$wRZ+@MmO^gxQ3Uv43Zo%iDO%&x1MS=vFtALaqIQEgKh2g*T;;- zY)7Ki4n8#nV8j7;TMzo41G$+Xeios6XbYw-pF1~T`vkk0Ii>Y#ZjVFq|E|m|%DJj? 
zVQR3L*C-l_@Ok2zI=k9r^ocrVTg+jToAIDy;mYE{#6S&_?s`Sw_b_VhFqpE_(HW{0}Yw4tbl9 z4z_CEc^)mj9X&0-tjYDb4cSLv$keC}tGq9kk5K&+Vu1* zPb?phC&Veb^zdxP-T-Sz=I-LVDj3}m#38s}*m1@TIviX$_Ca%5$yOVRE=9!zH%$Eg zV^%TY4HhqY*J%CgEgY=a%=RF9GC|p9Nc<1=OBZGq2=O6k13EqLtyy#ib10pJh>#f- zRBxZ0HQ%{G%gN6aUoIG}3D_!*Je|y!@MeVFFcmX&ayh>fW4(h(oNiyz?~vGv&2}Y$ zBr?^A=fYs~<|yOxc5T~|=6@?THsJYH#1#yyi^FbG|H{m!Sillbo{8k3g%O`kiT{-f z^C2m41P0~xxHQDc{1*^Qsp;gb7ySv_n?O*Bij~-Q#2n2hlE1h(`v91GZrpxn%75MA zO_}gPGlZ!!8{_>L$`9!mIAax6`p0dHyYKYc>{q#ZHBE=OzaJE*So87dXQaM^%#^4F z5Q)e6D%~wJXua!lq?wr|{$_pTuQ~sINC_-+Y=}2cSHC29aoee(@ zpZ@I`2w3RfrO%guZKB1`K02MpNi>^sV56lxkA*0%GH3P`|Y zmXLd^19H)Xh=Db1>7RoZ?3NQG{yai6(9*?s>F#=nD>gbhTC`@+fogzdQ$oC-J8Awy z3$WkUw{JB99R4iL6_Q#~9;_|_JB|1p8TtU#OT8L^)1Je#_Nb8SfRoCCg(+>k{m*ok zd0Mqb5sPh{{VS&5<~;i%?n1n>i@GSr(sx0rTR#dmOy@2O(w-s^+`tYTM*&Rh(nkDR z11Ekrc;V>3gpB>>0xnb5C7rfOTXOt_UwLVPhKxh(>e^iDc!%Xykuq(2GmZ~$cj#Je zv`@W$8+e#}GBrMJZI{12cwn7|lm9;}_4H!qhivk6+_-VE?_InphBSb#kO_jWi{is) z*?(5>^=XDt8Y)^8E=<8R#6z(Qd?@%fp{iHEGJMsL`h+F!CiGH9x+(u#K;VaPhgPF{E?`mu)efdWQ~9y8}k zS&1L_&)Uhq(O`dV^+zMpLP5iEki?$3$+_+PoAa%0AO+wrS-X33xw0$uZqSW1i!tO$QGb zf`u4T;frz+|D*pQ!q3kyWgUT@jG<<4h?(M3+Kd&uC>_1$4F;eGHwA2O4M%4|yq`@^ zdcc@N{pW0BNwltt&ve>hPfWu+Q;3hJm|A z2lTqr;L_By2XF5*pPL?hu4lV>W9AxN?b5P$aj=W~PE++mWzFm!+`8JR&tIlGFOwWT zINUruY|Y0(?~jdmA3bPFpm7r+AKjL$a7@kfpwXu~=tp{nh*d?*x}oIVKFc!?miz`b z6iZk;dG@LRB(gd3JVmIPO)K-w2b(y6z`Yg1M4?SYYpT=&{r}NUqy~b=(>*sA(-Xr* z_U6;ZsKQhTQZxtG1%63QftRw7bgAp=>Pkw0_sE0AL>pOcMp|LepeuUy+rZ1Indw|e zA>fJc5pOM{OS^9=|1i`|mrc7hC3hSCzFbW72$Pgk$~Ky3I%z1XWulyvBKQ(d&#J0Q zr>?iF&S->o>iP7^ld;b{B8+;5LpiVr8_KKV9fh#az;5B4@4db-IKN7(tE)J^gRR5G ziFYh^9i3rb;gn8LSKhgK=zAZ7^|zpzZwV4leYr7!aWGkk3RORaEe`z6Z(*o*W$&`- zIlT*Gu71^n^N|n(SI%s3XX~98P8`%9YC2(p8mB@_&TipsHh<4MrF>bK`fY+Z1%mZ} zV`{r3mJbbZuH$*87G>=w$;*TlRZ)N%GL5n+2Zi;tDxTdtVmVDtwl_S&rn!S&*z;0e zcJ_9(kdi_wek2)y$inpO6gW`F=}ga4j^HuZN}3dVO&MilWT&=g_2jad*;!c+c`_t& z3=Uc*f777u0Go||&|h7CHZ6C~q{EIxcu3eX!FEd}4x*;ZG9iqV~^3$N&O@!ntm68oNwk*#X_9`2Zg6r@@NVn 
zExjkn=7NIgq}k+`xLv=lF99lCs!r@bthyU0-HLvETcZfN9&xb4FDD*a6fMb!~Y_l zMZ|kJZbSjQ$oP-r$B$dQ><)PH_@f7@BV&{9X62?+we<52``nHdB3cKraiQU>L-1uR zLJDQ7Uk*LUXt-D=+5S$W402=Xul?A`6Rh+V?9daE3MC?z7H^!rfNfluvt+@#?${B? z111uAi}qKqUX=)6s+7SScPj*t=6s(AK_sq8_Ia81UOl$p(FQ**9C(*Fkf5N*66n83 znaj>6`zb(WHdK3v`m(SUE*RLd97` zEWyMP^6-j6yD}H{ac^b>;=#um+IUTY%ndM8iak#-F~p=f4dzLQ9FT5gW&PLr?k;Z4 z=T=o#$l0=D4NE1cWYC2-T^)o-nE8Wb}_FrK~Io8$pNhQ1!A^#@IhV zfBcc}ptiPnTe)I&-6C#L99so75Vu)1GVnogdW!M4;Ad?jvU0X;<~+yKdL76)M#tc^ z9tpo9BUODy<1bl1L1#ZW>{i6A&7-Ij7CD@8MVeHDx6xK%Q4nT+j^SlpCjmaqqc=Km zfE+K<9Oj;Dm6xBd{NwlD6CJy|(4BNaz7xm(tEE@_S1caVq$@ikxLF+e2>_xIpou@$ z7CCxQ`gN%+e_0h8xncmUgIkQ8S+Naf%LzpuqMH-qrY_VF>X*ESTs58rU!fXUkhq3h zBhCO)LYimAEo{bBQ4qGUpISg63gsHEALTNHj zga+tT_jKwz=#qO$9CSlnh0rjXOI5lEDVD#*K057gL8FNqW_%BL61ICnpS#aes?7(n zx%zC*Ah zSkAY*-iJ0=(HwA5&EUt1i)#|hZ_=H%qC{RmT@%3~~!#LaSLHCcAC?hWLth=*1LuXaxq>ufyrix2_ zFkd3HE-p+qXz*!n-ILsJ14nc7ef^*0q5n=r}kLXdOwmS!1sQh4-wXc^HTfg=WCAiBmF`1;*>N#YxyL_ zT$W!-q0}5wEyj+_uhceQJ!IQ?kala)LoC{4Z=yha3<8N=#G|B~l3Y1>(4kQCQ*r1x z!~1VzFqA>g2nVD2H$x`aS(Zkpaf?i&&qV8u->%!;rTyT=CyJ}K7aJ>7kPrayyE!>i ziI6d_KEGefZQaFkH+ImkwGUN(xH$Omgxw62KX|YW7Nv)zk`w1Qo8nk_7X+969{)LD zUVTxda=gh_AO{&bpwNWLzCDG5-&8e&sm|J;G(W69|XXrZnUMF7&NQ~&giT=b|H z+hrI2573^>DC0ZJ?(DAWRUSZ(%xyD3KE$3NlP4n6^W3W=X4J5s1uaYR)0c7Jb~b;R zx;D>AlX#e8dXC*n7r-W*CCGtsPE-AD9i6BQWtf+nQ)S4KMw2+udGItHe0HTQ%v!Mk zK`SBRf_`vtK8fBmRLod-VYu-D&&6Q4mvlMGW7B8uCO+yO{CM(mV%J1q5DQ$=9a2z@ z0?_IVE!W=dhUv6Z2VQ ze}2z$*fit2;N+|YpDim=PV7o7SWHQG!jHu*DH?iPeoTG+twfjhp%Gx};gV~lxr-{4 zdlB;DcinW4Vi}2nd98P(#2+$Wu8EjyK;%Pz{0Xqr1jZw!korgb(zRsRUUP%vAPTA< zRJ#HWx$vxuU+cpf>z-)*=<8I|zmlHBS>Bu@lJo7#^bhaJJr%hsUHkd-=OwWYz{SS> zgFVE(_L{oKuSV=U=yyG?y9;>r3NYc>prTM;7NX)K_C&Lz8AIy|RgV0IzgkAuu}iI7 zx$*{H(Eb1`pzc<|9l^0!WMCL*ibh|(^ODcYWbG=IWgn_4JYJ9(Bx->VUmiynUOabh z-xh$!*0jx*{+~6IsM613)(3#BLxljfzV%M}U!_GwE)0=Fy(XSb328@1_i$=C`K{7l zq(rl+wgzaa2dW>L;rKY_tH$#sY-vr=J}i!(*=gv|%cd31;C{O(Q`{o`J>1=ibMgS2 zM`E_Hk8w5LTU+Od!j=c0jeme}huW2|qd7Nu4%J%T04SBoO{AKUdGrYh@mv_Q2)&+~ 
z$zds2jFI_WjEscF_y5#P*S(cvPklw0GH%yM$V3{NGSB#uUgdM?s6~V;&_wC$>KJc~ zExnh9*+ySI8LM>=LP;TU*+E;TI#3hKM5)e=)z#HsQ^R5B978P1^ptrpz6IMgKPPa>eWOVu^nkwp{FG2f1PDQD-!H*MUuzXDRq z>d?k%Q!GobUv+LZu_R9awe1pc+OKc+n^|6ZM4#!_yZEtrM)~WzxxvaxB-8Tbm_EDD zbE8&6bb6ZwHk8bZpcQ3!Di40^`;oQAZeAF5`Wl?tlLifq;&FcNM$G{7W|%BHj^p~^7BO@hn;SW+m~|crJ%um?M^O! z&b5yhX-DFUV6eI;0PMDVilN*!EN#_72lh z)kcj(9O`L`40qoH#Oe(V8i*P{=SZrg%Wc^3SK<@fNwMB(ZJe<+g(p@=J&}x;MH(aGkB<;LhR+@ zu;IWI`8NmlTy{kI3Vnr)%7RQ7Ao_FmQOP}%SrDv>_&3_oQ{?&K+p~CHcqQ)`-|$nX zRLCH?hk{$sbaGro34@kv`W$D7M`77`TWr)#v1fSima*ulm^d|~cTKTKAl!lCjCzg7 zMOEgee2K~bQcoer=mz(Z6TgvD#H62{5i#FYc~k&$ZqO0ngjc$4q+>@G%^nPPt?Up_ zD=gXB)zhIF@hox*>UBK|BboRtnr=F6@h)!^e+{en%)?dmgpw>-1aM{*+^TtV1@8>l zSk+Hp5F8k_kd!gesyjCUyms%gcm(Kt>NqZdBI@f|{lSAHQ=7HXn0UnUwRt0oZCBPv z;*P7Gw>gzQwvnt+$DPKS%<^m=OM1n@)9HMl$VhM&}KLNk2 z&511hJBVC1Q8DJhQXdlE(*Z9CFnLM?4Tt+M$q4LrfLcHWFb1<)19-zDc^D9$Tu@F6 zD2#}KuWiYkVadOyrxfWK2gmQgLv<7~Zki5n9t%)*0C|;n54--+VVFgTFXvy_1VhP% z#=9yp_sd_#cp`;*O&h8DNY^L54ULVACKdlG2~5Hj<&SR=x64;#Qxu$t7Z;ZqjYH2U zD!-ynFHgp3?#>3KMs6??0fberkuOVQs%579P?0mSr%28?P1H1ToN*|nm^z9U&r?3u)1m>Q1S6YJt8_$G&Y}(D3mO5V(_^cNGE)K+MTE(#$UTouJr%bs zc%vt1{%iL9YV&V)HksDvEljF>*1ZhbjdqHrs}aeL_2u(XL%4<90LLNP2@`f>rjn{) zd+$?v87CLQOTp;?*6UHf%do~?X2(vK#KEO(5O9&r$a44=2L#~lc^-(eHsA=5)ENX7 z8$xK?^c`#B*#;G4Grq;O}?+TU$0XwsrL_S4cEITQ}RJB3kX zZEF(Xq~limn1hAp#3sG25{sn%F&ZhPOoxJ25pM^ni>*Wn>m4sg50`frqF~a7t$Odu zuTeiO&A8YQS(~Zdf%e#rB<~QvGbM@jyAB>tGcv?BE@~+5YwW=vDEoIpQVO&v{tNC} zQ^<~>UmWYs`)@^QyW)rotE1|-L|rs;dv#p{_F^*W-2CcMDeWxCzMT5PXLU zL~;;F3+m$9yc^&^6rFYYPl2SKkXQM&7-Sg&4vUOMYw z%3sCtq7KfLKdzvMY(Xh+Kn8@QRS0xty6h-(rJ-4;GSBAq`}e0UDFgcX@G$HUVPQ+$=4r*uucxA7v*D!|p7rj+ z?O|vOwhF5@Tg0)x4&-~Mw*eGn;z(3VdK9$xE_V7xOk*@BdvpLWG4@Zb*4nmxcuga4 z2!jeXL)E^0`_d7fw6-I=YFf_VEhyVnOT~b#*arlcr8Rgx=wl{2<{?INVzi#W;EKU7~W}Moy*9^Ikv?DnjP! 
zOU;;F$+Kp_&3kK!rsMJ7XE-fN^E@#HpgBUt*1QGNWd|JC zM*(bhQQ2US>nF%3>Z+jg#kSq!3h%NVw+1gzXJ^Q|kTi0CWmT)JxHU^Ee|(F@Lu(mf zI}6dYi~x|Qxp_RBu@UK6O0{cOdA(j(5Bs`qSheEnZwX$imnD^EpnnqKlrM-ih-#MvMS(io9$I?@hJy_f=}0e2G?d)2BITuC{3=M!lxkDJzuNlRqbZgIkYqL4-$R3PaOa#!t*EF zUc;Dth}{imcN_fv`IRY)D$aNT>u6o9{16su9{77=Wb48H@NvrYSAE{HnE<3-_n8+q zkZGf`Y_mV~($J8G(?0DRLL}Izll!u%Mzg3H+bs%HCj2%KiqFcSZDCy=dpY^V>&WPC zXwsLe5ua>YvuTLlFtr%TooT`1%KfZHGm3x;m**V)>bN` zsw6${}whT0x(^vzdP7yWL>@y6F z<>KKu;AjnSFl?BYyN1$D5?nc}MalXwdeeG*Cf__e+C8Gs_=EZ)#-cqN&R5DID<8yu zA0ur&q@hL(kDnpR=gZ(chccsc^hZ>NDj?M6O6b&EXr;)+`aFGJm+E_8{oBSiR+_bb zyni}K!i*T_Wj?JF?WGJe&CJXsi>|&f`*fS8_5m9*qZ}zAf@h{0wtGu(uPB*MoEW<+ zgH6x$8QW0MRpd!fQa8bDj{`G`Gl#}XBFU+5yfgY`&R?;j%J_i@ibd_slS(nUO=8y| z8;SqSXg{p7K&WC0vFjG?xlr)>sgn)WIlwx~L=m1ZtJOc812_uMxCzv(yc-YA!8Jff znBBz0_@7UMVad9%$+iE2<7L?2Ed;k#K6VtlRFj98do6La_`pS7OGu4>CI(HJbtvi( z?>$GyZb$Y%n>1|5AwmcgU+q#JfI5OOSj^%8;GN)(|3yCR`0=*@7Qur#?6zSTkx|uv z1n^B#sjPT2Q^de@{bEO>K@>(AU{(}|kpHbNUxQ|MXYjQ%D3=9T{HUx4p_~!girGk;$n_i*lA8)CAV|5l$IQ$d;+V3u~J`N!t3H#s_X#hL*gt~K}pPxnn8FHYjN0QV^BPWE^DgurAQE6lt5*=J;Y@m23UkbF!`X|P&C3o*r3M#_V;oST2YT_2Lnxm-5a1lIV^;+@_duU*VFI#;7- z3BC7uHDe2)(5E~7ygO?zTymZ?*o-+WdAT&;9sZz&Jh8ned>Y1t5y9kT6anEWO-x7WI9EK%cV=jIg=m^P>NW*Ji^>%`Xv9|EC~3epqKO@GUMnKdga@k2z{1S zvAHM5IBingU=L>$%~1=JoS3CpVq_WfAYdGmuK&0oeM) zgUR&mx~%WteSJGBG*6o^V`E|2WK;fQr1)dHljJhWg~!vWP!LQga}C&nZX#k)&-eKf zrEq~V%9b7c>{-pvQ4o_=iFUvJba;HfoV>91uc!`r1y=kR6gbV<&;?Fs?$^7_=;0D) z%z@Y)6kz|3o*M&^ch{ktAf7-bQL_(;bW)l?&qLU>hml9)L4gb}+7`sbI&2vaCo;!q zi_MT05>3RGvyu=HiLn-8v*e8JJ=}mJ#&nrq7Ozbdq>2GRHeY z5AMAr^WPYnsBo#Fy3KMb)iJMi@jP?JI=meUe9JrZU(=>Z3I>RaxX0OFd8^rME~=$R zbQX21c>nGlNK0d~<;OOBcODB~H};=7o>>G!0KJ8(2NFF6{3?;d!mF}&;<YC3Td;olc`LaMIus-sZ1s;fpiSI*2Sd23%WYt8D_3I&kEy==mcLVk`Y z4l#l10ra_n%pESdcG27>@xTDLl9D83ao`v2hl{Q#gQX``q9~pv46^@Djj@e_F8h&iMJOk4UpU% zFgbNo#q|jiFqQJf%c*D}pGk&}&zV8-dg6;>6cZG6ds&%!TsS|U$rId+IUHjE7A%4x z>^dh3Y!(ers1LtP!4)=Q?pJ&IU9mLCbt1C0sf=nxXc4kkA=plT_6LbbqZC5@9xsUs 
zZyke+6Eyrp@}T-G)F$GLWyI-R+UUPCkcoO}e3r@Ny{X2T{8SOFIF(#?B_;k^2M6_L z$HV)2zB&T7V&GNAHKJ?2L6uD#bsUw1ZF`P>YGKV~>5m_~d3&qVv%7-7#zMhxM1oO$ zZ|_@sx;m#9h`QyXO>9eSYIE%EgDk}*B+S0)o4DJzk8>CB@#Zmr<__TUO z-NyM6J`7stwk|Q>gVtGc^h~EX*c{lsyD3lmHV8BXhnG_~a&p;BHE{WR?JfUGwDF%X z2Kfjw+ZLTVHC0H4G@LhfnQ9}Si1^MTDeTd!mxRYYxwNc3^KY8HiYvLq_M0N}dT)5_ z+i&mLy8Ff%WIoY)*Wy|9IoUl1F>FS|*{pkFIzl%G|Eb92V*%yDdVDHjV}KsQ zE!QK%Fl~5at`ry;rk7_%?E=hr&5qbse@oQ#0SbwXmgtusOj_ExXOB$AVXvGZN=L=< z|B|O!Ec4&E+H%~09+Yh2H_KchB1UW)!a>-d!C-{-NcOAZ_*pZo5rRck*m~h?=RTgL z_4&=-9{SZx$uy>0U?44+Fvz~flu_%P)HtY<8`NkV&^={546^&lp^p%AcEIW?vtJf3 z4%M;PK>a7w02XX6d;{@AVv`UTa_YwwpuB9Czp&sihtIG7luO6k4U&5^VP}N zC6l#aoF*^nICbJGVw5<7Brpplv4q%3ZY%nLss8IK3#%%dQS#%p=}8^dkUa(YbVzg+ z=atM0C$dHILM1Z;rcN!*!x!l!YkxPkp)TAo&}YPy$1H6T?Yj-}aMs6Q1EW)0A?J|j z@nWgjp+``E`spky;!#M(bGxWYmKGguVym1AklaA;#r1_ML1}R$rW#_<1@hrqxbUv8Y z2FZ_5E{G|&Q_+kf5DsUA7y~j^wf>XV22GY|i@fcY%WM>idRs@&h1I=Y^=|>r?GH~r zH+4Snp-`99QKG0)X3=H8DJT$=e>T*nRY%jq$fQ4Uj*(P&I&_S`sTBTn)Rw!~a@z)D zgDXNkYCZb5>iyNy>g!<{(y=HO&S3I|`4d?m0my=_IyOyiZ*wKC7veSBWXcw+%*`)Q zO|jYUKUqv#$v=OU4#2)x(yXBAKDA&IWb&G$hT7p=wQj>gX#T)%-XT`BY}i zc=j(~K_2|fmPczJd1nXhqsqzZ92g(PIE)%zHS^u7t!=uy(Jk@cSn|K$K|MknI&J{e z;`Pv5KMkG^xNPax@&@I(S{gyhFXYYMu9Pa6J|Dv!>T#N_*i%aekEu1 zij8i=ZX4Ivt6TTR!-qxQ&oF!o6-?uQdTX#w1X>*Fuqn~PbasQ&Niqh6F#(uE#B+7= zqK=!c-R4jAIO5LHcLY_8w|zEo!i0y+I0ix=mjgg=T38FHfdDYa54k`0%PS`P_dbrB z`*-f!9QHOOZ-*=#@lz1xFi#o_vB-m&h}d1CG|+)rL~~?TA1j3vnp@nabm^w_o6w7) zTaMfZCFK~svSb!gR#~+igAiI)BvvuP&pH@1ka!6ALe+xCohe4BW@3R3#rPYvj=LgZRrS9;F+FMI!E1eS=5X~*u}lX8+=#%_lMm!XF!!H`~emPu6fGnONlr1cSV zAB{HE%69m`WBSO@*XO0wqrQp5{>_21gFf*!#e`}+?`#xL-EjglY3L;C1*b~z1B2GdzhiGS6s`A7^ut=aH%1^IEk5kJ!5uQWqeqvt$_}IRD zkSnS9M8*bk!J)YW^H6BVdBi*basnm=oQ)>K3bDf^w+EMXC|L1nSuE7X#0C=bmD{4T zy75E`&Zg3$iA#X4Q4F4vngJ>nGKu>Q3p#)+$Jrxbcu2?p1fQH(%^P(qQ&4jKEvIfQe;*$fMH|1aj0qHYNc5#7NlqLn&>hKwsIcnFGug#+sb*bD^N7gf42tw2JIGP5H+ z)mEAGAH4!<8Ms=jBTC_(1ePaLc%Rp8E}?|2$?Nh~WN6$IA}>=c^K!mnHk0EP435fQ z4X>TL?Tq!*tB3R)2Qux&oUKpB&?z}A#YicMq0CU5NBJvCe|p$c>&iJm3018Ps_Mom 
z^9#acOHJo5@2h8Mi&}t&ic>vS+Q3=*;A(>FiBRUz&{HQ)yrQ16XiuDrh*RVxNVsAL zuZ@o;4ixvXJFUxZ++KGu4F@d)!4O*lriaHP)6YZ80b{pd?CdK?V@|PxbyoynRz-*2oPI|9b4EU{UU|@G18yR9L zMF2RXDE&Rw-+rV{z=n`STl2K7e3yyKWm-kSFJ-F>s3VEZP*L5WPp6fT7 z-H&S(Oyd;tXy|plP)7oO<Y{ER@1R3(D!t4aw!==)uKseM=K*a4rsWFcVSy?dh z-_Vrq%k)=exu#rw09PpN?(}mtImOk-qj?^x*Z(JNc~iyLAD@$gzEgCND0+u&=#b*1 zlAfdq90qgNVp4TM(BfgAg_Syex(_)US`+p(Qov&E+kzg>7z`uG{5C(FbRMg*TbLq^ z?S1lxUooUNMUmb6+l*;Fvk3toMU^DMknE{l@lgJwL+(H$Wf}INrVH2XAPKJilXzB{ zEwv5wM-;OgDt`<^hLQNb>Yx-G|B`6`jy_GH-EvQU@qZE!Pu}G0fK2B#IJsb8usK*d zuH22_%uG3`t~A|QoUX}v*OYZmP2Ws03TLzv4$0Bu!&Z-9 zDXKsOw?+mA+Y)=P@o2TYr^!uGVnX1K4p~Ktf&}fGCJyG?_@tIv;|_8(Gvlwn(Haoc z$;e~a&I3Vg6}ErO?3V}l2wc8br-=jZ5oxxJ6$S6!iRv-u95#qOw4?rLs}wG9BtGpU zlsrm;lgp_%`-UQwtypiM2$1jZmT^6kd@WX$)Vx2E7zW_ z_zt&C?W7j2GbLB}^j-zSfSq$%Uy{7}cTB zPAiCJNk|mq*RLCBXD8&mdr7N{Ua&qFP85kvEf>p{DJM578ioz6B~H!w$jNU~C{nHxd7?$T z3{F{_tA2-Td?4f8rAyXV>c1y4T2P!U8WA~x2}$ywyqtUwDtIZ|7@;8-7Y{^4>Tb9e zW?Ziny~lCuN(CM8Ui7^ zVfTb@%wyhOp=wgrUKF&T$ZjUI4@PEx{&Wso22Za2mA8u~VJq8-LUJ|u`X+2}Erk@? 
z_(#vqzF8YpRb3==J%RRWAzD=sDj3iCE3=bWXEbo4+>40lR5gd=0Yy{gkqD zf@@VJ>P3N(vkT}tTG$<@v@OrUM)?dDO>o+y3Xq;X^0f^$Eu zfp;(B>h|APAGYgEO1aMjh0jSYvG?+-m-lL92g*8FJov}*nqeBZ!h#e@C{|ytBaOkV%$F`s#dN3 z(WiP~;k(Bn6BK_QIYQEOXS&fXk3Kd3`!z_ue#mEcYxfe5P~GbMt$@ouiFMCniV`U> z$6!p}hMi+4LKu(1D}yGUpvGIFO?OmY_tC#}I=^|%HOX9T^xep1m6b{LPcHeQbWmN0 z1QqxqCSmH%;MzRZwG|6toRlD}YrYC@P^*8xIH)fb#!a zpY*@?e&||Xt);LkzBg2FP0*j!ce~F8>*-oA>bi8d{6FlG`0u??|AW`U|Np`NM{iaC zy|<@k0jF%dWlF#@zoxFc9-Y!e(KfL5{|VdpfA6*VPiuDPZtvv^g=Osi{Y&lgjOB{m zox8wG`}ErG1QK)fwZ7#%sW{eqd0qbGY@e8;Zx(62XrR#4P}HB==l)yM%P!uj8%kbY z)^Oa=vwdy8r_bTQ{YU>a9NK(; z>Fm4tb)&e6UuGU%WxV?RxrFqXA)mJ_@bg`oabTuX>Amu0nTLL}EB?Jlq2CNf#3@_` zXT|T|teKGA&$RO6be7g2`c)B-oMnZIJd`fLh*k6!!tK7N_jeMY5DsF9+fBePij}Lr#QWLdf4Ff zPHScl2G?4F=L-4Zm*0TBwO$-8U9{m>$Cu?^FRLoD=KqIezLEH4c){s69(G5sTrGdI-y`WpSf+1k!_?e{n+&Cwf+0{or`fjzieEKUw6MuTiPpq*1@RHz6nlc6P+uJ0~>UT zh#Hue_sg>_WiJx!D(zQSlzM!7bY|JJ%WYnNOuv3iujobz*Y;>szE#2_zmZ>#G;P|; zu3~X z|N860zunTG@G*LQ(2NT)dKGzb_dlDA9QaxD-oP1`dzo!8C=Sx@snT@9^_M3nCbaT9 zv^hs>P~qiO^NMF&_x_QkJ#yprs(U^B#&7ACup>83hfh1(bAwSQ-){v^$5ozdJ9A6= zEF<;8uUp^Fnw9YI;q7_Xu6ZoWzV=3AMf+cRfA5i5dgiyXk9p6MpZ)oxz}Z*t^Wk~w z!KE8lCyg-L*{4CL3r?Gg|5(r?-FEnz7R(|waw~0CwK7@%fLdW*lc8#(i^4BYa$dTA zeQ~z_F}R#P`A2SQQ`3=`(dYw+|%o8&z>6iqxOtib?PJx&)oOpQ)i8` zm)BFmr`a8=`Vp|W%&BlZ@&C`xG1zuu6aN(HM^VB3A<;CQ&wq;k} zG+Ci}QcX82Z(PjYD4hh8+%&_QPip0LeUevhuWuz!zKq%7?ozbrW&3B=5p7<-fA#9# zv&E6!M(NEQcT~-}@^Dg*s@$px&)1)yw`by(w+q}ub~ZB`{yf=St#4Gt2P3NiZXcfQ zZxeI2L3~8?y6+YFrEiS1O}CVocm&0J1sKq7?{DV(>$l>Ar}QsbkGCz_mW4diZ^MT{ z584jzJbqHuumWF2+ceGVH++&aqZ6KG`uxcA>(xv@NsajFs%P{PEaJ@@DhycB-nLWBD8Hg(spRdZ^See1>fWhza3bS|G-?B8tS z=?=%<57;x#w(9$v=WF6GIjnE9wPZ%7*XDv5Ys!a(EiM1@CP;hY@@wH!3)lBba-3xT zbkmPdahndQ)vNDZwmv~)mw&CSBs^Mo(-PySh-XvA_as2$}eu8SQ4lXc>wte0~zceJl?uXIpY|l0N@oW5ML^-F=9Q1k1 zK|9+C2_-?=_oH4KwNbC5I97W4mvtLWy&`&Pn-;HMvs9apdUiWiu)@+qf`I-nB(91@9-KLbFOVivL{S0&#!p*ZmjL+ zBi*7ZKI{8A>iXu*TxB#Mbls)oKXZ@Gc>bGn{*+efxrg81ZnghqdGwpd5zFUUzTNM& 
zdBl~02^w#oa6~9(FZ&wO!})LzeXlp6hs#FjEi3A{{*QW5k<(k$ZPs*B;|@cYYJU0U zrI~r_wlAHX&)q3px;@jc;{BV*h>5#(7cYHgZlXRXx9W#q^+!B2)$H3%mHlNg>+>`2 zXZx<1lowlJ#eTo9h9q%R zY|%KnS1)f|+`^}aV=H1}-VAjf^exA(Ut06!cXP(Y9RA_a!&!gFCF%W^#G7e2+Iam~ z=vDPBbHJD}acggtWey+WxhBxOWLF2hUoVB*Y8$*9v*S+e>*?VQ>Z_>4^F`k)6<^+j zubBAp>XPpTX^Z2CuBgmzbt{h5`|*m#H#Qu{zRyeF{*~?JSMf9;>78$3A0M|rAF%n1 zChd5SdJ~;Lq8&AZwVXpK=b6T`8P}aE@U=znd4d9 zXlaWVYs`xED^1Qlct4^0kEt_dGb+7nYpr)Ssw{d?g#DDaN7uL)m4$}6FIs={0(OYZ+-?mfer%)WN<0D?0rGNU3&9YsY%Kt%{WjwpyUmEKhZ zgb+gS&2bPFfdK@hMx~d4w1k?VNH3u#Kp>$92qE-9LXz_^?|Xjl`FOtkuk+6rxGqSt z_u8x6>t5?wJMeubN3H&PmvYxua+rI3%wY(JwqlJo<1Yn!CnfaJ#==?e-M(~Kz6ZHv5C>9>wZx=ix4bb)DD02@2|@t`<)C=%qQk-L3o5>n)*)_s zvthJ|x))-uGk`823ibU|e7VjUn_ADPYsltFh3z_IWaw^}IV1jnxV!5*H<# z5PnZ?rVJKpD{mPExw?8whj&PH34(jW`bC;PpW6@N?E6A{eixCcSJ#KR-K)}^lJY(x zwTsw6tdR|7Q)6Y{3U%<-+T1Li$PPT)^YquhIy}hQDjJiSq}#r=J8uNrLVhR){keg; z+y-S0=m2NSrE~l2_SS0(fpAlpqmClC$F-euk;xuuA9`U}@IUr@+-okbi_VR>c?^E= zRzDS=D&M8ZdA?b}G2dJZ6d8u$%xX2J)Bu^ympU4GHv2DzJFfrrJR~oYLrG^euK)Nd zj|*R$oVe9Cmpu88rcBp$gWvwu>JhBLX6mK=cbBBHxWP8a`9`ul6MRzWL&fO(Z{9&c zvrqR)q_0BIFxAOn(^ZJJxa4`+)Q%8bsEgx8DkT!LbXsi^jDLBCIzS4r-2@I`6?&pV zhKQEI?n2wJ`#&9bsPHc z!{Im0w_#}E8MG^$AtycnBuwb1)4Wo4LgwYL_II?0R{B^4gkoN99hLCMgVqsE-}&S4bEYudO)M+*I=!nvD`FkeZj|UxGRTi_+8NeR1R~T$MB6#BKT)aVf zQ3s2p8^I*-wua>2KD8 zX=rpwbeSgaEdd1P9c8vp62vG8z*@K=yWG)h*IvOf!LuX%nJU7=5UIl`bzOXI<)Mnl zgIzj86}c^sJ|kR8W|6iDp10rsw$~BiF}o{o`+l5&;rRc#{(^`SuI_8zYwIGCbDC(d zHYg-J*+k*}83kydr~9J3KWg{?@Yc6= z^<<1=er)c~%fv3fOnUxeiH63}HKq3M+@x)JM6477z!C_OyWzl9hLUEfAO9s;uhD^c ztDn?#rHgAl0i6nGjFe0PJ|IRiqYg&Je|;Wwu>GUfrjnhgcV#Vz>b|1jXL}xr-**lu7m8lj3-74y9eGhz#EpiM+|;(8F9S%h!WCB+{DJqr2iIs-?I< zlos4l#Hgw*=j{^ho*kw)D*#{m->O|7 z00q*%WMdu0_U&u3W7gIQhDSHY>52gA-?TB03d?HM7mJGqdkE+Voy=EO|G0KKNz+AN z@i0Rv6UokQgCy^Kq}ib=L%UOF+Es(r3v^dP`JnGz(=Y64M{ zKZt2$1kc<+x7U63=b9qD9``HlyQl=$R0RmT*V+Ux!KJz- zAdz|@{d;jEp=T+|QP!i$G)TP`(Xml0Zf!R7DT5M7S>uK1{Detcq^*=xY*Yac<7J({{>+(Y z-F+q3Mi%L^kx{GBIOV6fVUr;tWYr81_$g1$f)ZBGTfa*EXuAqh)mKlT^{%(JR@3W# 
zzFDh?;9nom(W#%KtaY|ZMFj0D_BY@|J>mVq47KrtygsEh^g;~!1>KUNU|I?&5WY~$ zaUyw;FV}6`TzWgpm)~pr%s}bO)COUt)(>{R%ft-NV=trX*K2vqbW6uGp3rQeBe$x2 zcaF4V>nX0C89=(o*~|FJuc%R=>zD<5zMy)(=@{+pN0?v^I2wDsBE)C2Lf9Nz9gWqm z+KE9c=;R)LDH{Jst`UL_&Ck542(tv?yiS_lNy~)tdo>nl1jzS(Jz6GC?!M!ETS%pF zGFycyTKZLe_gg$N){bY=u)$>2VbZsMxU)R*gQVS_AJL%Fxclx2EzOUa4gsc~5#xR5 zNtGem24;@9ajp720};vxhqMb=y|-yfN&K8Vdih z@=H{5Te?7qvXSrBx}Ky$NdzQsCC%B)z?hkm>@)74=Dd~cTEUd4AU(#Vy27o$z5&kK zYpmRaX0v?ddIr;uclrxH@gYIF171`}y5Rolpy2~J4HW1@wnC!RT{`&iXa7qKhS?DX zz#`UlN^{+-p|+OparY(5!5Mzpb?OCC#!25?g;TGWYd!7+oOHBDSt;fv<^&h5jOBf@jrJ1=w4LnuJ`{HT&k(N8o zANgBq<85>q3SssWZ3DGP=mz{vras}<(jwkH%t9&1!J{?d1ZB~Zl$GfaGF(zD>KeYJ zRxKkEKJzteAA&Q?4V)SN$7JUrLgm13X|O!l+o^Ng$* z2*=BlJ5E$O_&ojZ$sRYR^MY}Mi45XjZVxEUP#YX>!Dhy<2!84i5tEz94d-!R79elm__-{P=wKSYfe(~;F zl%DOfCQ1nqyp6ORWEEa~Eh_D~;Iu>}S{lvmrJWCXx)Y{rB^7VR-j$D74;_1q)4W#O zZ)4Lc$882A{K#1Y(yWx3Q|FeSS@}ooE&lBj!%`JuN=*XC)2I5+477xuOXD-UJKpD- zTbP(=U+Q{nDLz;IY%_1&s7JmO{`*?=F!|Am`dBUgZ8p6Q0>&)q&M5Y*m7%jz zs!3t^q&Haop+RyY<~TET8`vi3QYf||FhBF#r2-^K^7^k)jnsv$2Ng<6Mm)`z2uh1$ zpI!k$>hG#$=#!r9z*4!-78UYSee2iuRTRUeEG~Rkl*_07UGCgyfgBzN1GMxLTS28N zc{9M-`D*-mq^0`N_v6>m=BLgB-*ou;_UQj*8GW!4sP3@JAylYlEdlA5gU2I5{?D*ADByBl@MnE(OzZZ4V0OyY3f z5msnMn9O9QTg#T#a{90*jmFR`JKlw5jFIa7_kA?>qvR2Fcel7+y3#jsiY?S)g#s5J zeLxtruU-RJg(4|E`m zZ)VAlv!kcB)B}h;=zP}SYhhd?GJ>!%ti~yf$Kffk>{DN-m=&IVo&AY!@iwAbuEG}% zlDQj8FP=V~l{!f?ZnzTdIAvQ%YW8$nlyijAzZ5g1uo?@uc0z`-&V(Uu-`W^z)NYmw z#e6G7LJjeZI*j<Mak!MG!ZRpFcMauvPi8UY_y zo1b;3zOp@9;6c^ED)#C072{emHGHOqE1;$J(7x03h}n;vFRg8|;K>{QT)7Iv@&@-= z;Fc~#gJW|}SYe;%Te_cdIF86V1EPMPIG1|lhDqN{@#SyG#e>obcd;vk)c!B>N7D+# z(WCB0zeYlLY~17a(f%Zu+Q9_sp~IYwerPu;MM7h0=V1U6x0#7l8|lrd=v6$RsiLHh z(p6O}oO=8aL)rLgW_i`BW`pnjHC=(oN2fefn}o%T5zKGIayEgEsDdrc)W=lOM90B^ zlonB8EPis1?;g?HPfwEdy<@R5a%;@blV*tY-3KOwfc##x7I)E@!6K|l%`0iraQwmAkda0hey=py67SWs8{ z(9w+j6>J?O_h{;(Z>5Ew$I8_LoQ;bPL(u3~HCt(vKQ^bY!S7HSg0SRkj zeM?{3B5{F{5rO5H^_7EZ3CO3dNQ8(v~3#w!_T+q{E 
z7Xhv#_yzGx=t*ZBh4CVYa*%q;rl1UsO-yHTjyu~b8qCZ`Q`UR|O1#ePSO5d*f$8z^VxBviiJb*9u!rP1X8eh(k`m|oaun%ZuQE@5JULNOQWc@dbEBoZ zcny5xP5}_ZTbdbnfylOU#j61nbRRP#s_pRBOjiYLV${C~8 zGb8&Z)td0W&6we;SEtz`yExXZqi}?}^WJm*mQZXZ*wQh~AH!{@DaksAly=2v*5+Qe#?l*-lf2>{!p_Oad0*nz$0qg&GqJ1{RMRIaB76$ zw63F*Gw<{z=}_GQ59nwiMoNpa-OCbnrhnMoh-}aY#};ryk2?!N(A`4!#B_?I8Z)Gvgj1D+O6BOG*Kwmt@Tl z{#PB$)rTilYi;DU3K@COP|MwLsH1K`+$H4vwEa<~?deBM#WfSk<7}lIC!`IW$Lon1 zj|rzfmRqrl6D$+Nyo+*|v*inl2C$HjLZ2CS*a8H$7!+-B{u5p`eQX*?>lzF$cdg-Y zkQN%Y2qK-(s|Cth21fPgmcF-V-q(88Q6`pv`g7Xb|NLdhJIKv;sl*fIqd(ktgm zUNI6}aA~{qEy&R_0f#gmuLOTUT}mg&EiDa}>bHD(7|-Hz_ARfK$I5=zv%P;!LoGeq zlQzk30YXoe>zUO5vlA&2`nMG)TK_Q)+|6f1IucpdHDaw)brF%)mR)IV-Iaw&01f z&{&MXH`&jwcth6KEfaJ&FeuCfum*8YV5^Ju9C~Fo0Ck^HQO5X4@5qd!nrCh^MVGZ} zL+_Vs1<{|1W)_@WxhkeP82Bf-siK;0%PPq74eIc+sSd;2_E})KSPTMq5MG^y!N83P zfN%Ag5Hs$%?AUlIoPukn9iq6X;zjkVV10F!fEQcCDus+mQlq@B>^vUa}d~|Br@4Kn>VDvF!%zM zE4lL_w~E`q2GD`}wW#BOuB$U+l>^cMS1Oe6WZLC#pj|#y7*L2;#$rubmGW3 zMZlnPf5K3xX9Y-A?H2#m!!o|GrN)?{CB@)BI+pTjJPCwg$$W+GO8l8qZ*IRD2yc2(__fF{TvW_q<5={|FI(2+%Dd)zZ;)5aR;_VlUf@XLir z|B=NS66LvD9I9I`D9uf9+6MLpw70b-6va|p#(ilN>SOJ7F%^~!nyVE+#+3qIRkwJ& zpx;s!r$=y5-y*fkAPFF(AKD-tno`cEDGQQpio8MQB*H1ENMBoBMjx%Za^{2Jzlkuw z21v&V9+z@P9(GebEg4E*Tr2`Km$6C`_iuZw=aTQPY% zsPq$%VU*!-e_y(*3r)2+j&idHs0-&~OZi102fzV#_@%!2`}knwP=8p0Pxx%EhGfTG4p(D6_y=QkqyF ztqj2ZTevLeV!XF3JJ?;;zQb=cUE!R=i`+)w;?s%p{#*@;rUcz1-f*KPYNx1l)@z{d zpPgEfufbj`emMOna{C?%F9f2145T>FhRaVlNKQu3#GW6JY{uHvW48v#ClmrK!A*!iYe~9iVf-E zlT81LyhNL3l?95Syqn+2{9IQrywYu334PdX(C3gh6LkSY9{yKqE1?8+A#mx<;&fn$ zY-jX}DYp#YkJAd6z}KO_Iw(S%SM-stv%$7V3wN4bX=0wr&fYC&%Jt^{y0lOX1!mdU zp)fCcd{C4jps)kInLUKn*-4@&gvjrZ_T#dlz!d~I>r7_2*}?ZbA@9+~$vQM)Q2q`~=jMOuwwCzb5x^%I-!qC(mg`fw@lHHneJi) zuDaaN_i2QQHy`?B9ybS95V-o(`Y>nMxT6c#xk zQj$rGiEjd%4+a~$8gr!zDJTR6mY>NB^zc2Rmr{NOYhK_33mT3&?0!H~5rGRB@D!M; z28IOs21L62j&MEuINV7qQ(@}k6v8%}W_yyIw(Gk0W#&W#eDV7>xOu)HYYU6hXKYNx zhOpyAV1^1dBc@kO6qYZk734);=}PGkS8}%12Rd!KEDJru4pQ`(>g^gqmKyheTVN`z 
z^t_^;?4^zwnT&sIb{yBBMs#3S$2_=cL>Uh`mIlc;#Dv+87zx}hLcVs)sX|&PM#k}^ zCd+<)wNvPkw>bZsuX-|2tPm%9TxqSgJSL;BJ|T`!Qsa@JKg|S>XJ5YXq>-Q)DxX;j z|Kv2jG$|`>S~z_D?sUM2`on(fs^#@x#=ab#D6Ih+x12iyP=Ow~@QiD;%PLikdnXbk zukSaSec)yWivuxnOq(sW`$b^u^$Mvhso~oGMfuNQ{Ut%U}~$O?>Opk&0vLa}gTcXResw@8GS07%7{ARHS{Y266SM++d|Qrxr?! zc#|o888>=4MkU6=(NUzu<48*jvwDnY-EUaT1n7}!6L4>a@s#PVnWJy2w}v$ikw}6CJ+aiFKbB5 z-l9v}bdnRhUf(V--^8aF@sT{SzqPnm9i!g{rCM1}4|%@Tu;=&Epw3^v4{ACm+~WKU z4pd~)s|YXiQ=C0$7Nha~UwG?Qvtl3dH2qWSG{IJ*vgLdYye@&X$4EkI^~ug(EHpt1 z$Gv+nCfz9cyP!!?*g_7uS%#p+Zm=#?vxOcKd2I-+Y9Xi4G~qx?HK-6M(njrWXALUt zk_(LlUuDK*6f+FNw3o4Ykc9o$9lbU)tWG_tE<6QpMA>a~#5>=dZH4*gu*8Sr4`2l=ORRkYlftKUxybYrs@-L4qRVC)?U5knwZ~!%Z z{->(1Qt){2H=DPurgzX1AVmP_%W-}~|7Chdhy?ybMfirV?N>0azx?OByI-$Ce3vu9 zUd7qS^ANw(KwHnBmV0WTGyU3BXByM7yYK;`<%B=>Q$lJyvCo8*mE#-YG6~PuTwgny zbVg~VtX;M=t+m{Q-ra)aQ(J&!-*Qr30az?wc69|V8|&vv>=8qX94`T_Su>AK2e6JD z7H#hHK7GozelY#yY#p9TlYppCMDTlAT86TWM6RN3X#6c&S_>5&A)j~~rjCxeu%z!A z5I$MIW~ST&rIOD1Vnl=nsAk(qYY3sD$;e~ zAR!CUk2F16b@%|*OTylM-Aq9a?NdZJpMro}&lM|CQkIPx|$l>@-9JBW;~r7wfhqZLZa6Zn)t_Ew}3Q83D~fwykz z@lae&n?wj^R>-?Yli$zxTe?Q%a{pw|LfrY_7s4m;R#Bv_5Z`66Lf^XzNM%@GfrVVm zn!7K|4{&D9RX`~c;FO*{1nx3=dV7b&{F{Oml!14qbE&;-QyS?Mo^tJa3CGjatyB|B?Pnp7AZ zQnOopk*EYojSHO5<^kE0-^I}K6G$J2&NGjkEt&vvod5Og z1(0N&(;o!wg^7Qu-(hd)cW+3zodSC?-&B3JT`VFf=+D=(0B(c;d^A8oYS9pIRev3( z6r}jn-ks_qEAc=oskT|pmA!AhJa%hKY4=c}fzoBxy06UjqA7!g0s9!AxfJkX4OvaT zkh5F7fUBprv`U4k3N>@TWHG>$e8vIEjRB3T@nJ+_Gzj+I_5y3-R_$lYwU1};hVUpw zDK`+Q6uIL8hpZs<7j%aFEA4NKKpwE;T1lAn0P0ZnM_OnW)5eyMC;PW?`c=<#_IIO? 
zvif(np} zqI?rr=c9-JL9p{N+I4J_+&*%Oeg(>$x;W+DSu4p*Mkj zSl*yNp`bvt8kP76+TF5`e<>%seDN{=i8AGX{X*xN0q2=TyK7NIAdCMZ+(Hv*88!!1 z#K?XomVL+`J~MEcay|P!YP@?|Xd<(q(KnRR#5djEuDgtLaB1V6p7K2r6)K_za#%iE zcR7bQ5K^xs;K6@+kcJk>|EzNeNYmf7RgGX_80W*M3&j+!$PP`6F(3mZ&m=7kUh zWpL{yN!!2Qw$RuT!3e|AwPX=<#BONM*22&6$)s}n_5qXU&g(<-G_81#9w+S10i6Q^ zEd^1SudjeQtMTtOO<(5y3(7$xmC5Zta&Oo_um`H9p7d(}VfPx|x*McLR}cg6P_Iqz zIM#9RPBI!;vp?ZEFROgnP{z@~j_gQ(*AKLcE%B`*hl$xx3FCl#$zNx>Yi0TKmh`^r zI$Y@I5@xh@Ay&xlLoOun_DK%qHPl#Q>h!`}87@v{t}T-Fe3Da$?pTu=o1#kY^crAy zpJ=kb7tRexNxn~K^p`cPE<>~>5c^5EpFLQvPg)2;$|hLAOAh>Jo?w*R@s#7u3W0f};(I0WTM?p`gpqD`Ld(VFf>j1js4u&?% z3@x)!i|^~}I3g7Z8<|uHMCIc1-NEs_AKa~I)xE}}PA^Q@D?<(;Gp&k0=g9j(tE>S` z-A;mM+>bY;NwZ04nq<=%y*2Gqwj55X>NYFpYh6C)9LBHYj8G)Ibz><|lR42hU~Q>B zJkRQtadgsWZ+=WjD6!?qvgA2&LYwEtH%`qdkc>Hc(P!Ba5pZlN@grYT$TO;4_h%<9 z6P|3ZTZosk;VCbNK7UCv2!dS>=l7CJsgExHu~`j$(kbjCAKlj%g5(WSaOPhZ#RbF6 z94$aH(apctf-^2P11-%LEz_xiRs~VAw@_QyLx&KCQcGWQ}u5k@3rxH23;*@$Cq53g4HOi%Q;p2Fe0T72O*8Vj%)vcllw@`PZLZJ{wBs)jAfn*6VKR0Xhuau-N9kE zVw9gK5)?7Iv^MyusapMre73wiuHSEGpj^C?>qtu|1{)(AF#EY%8|wru@uLy@cRV62 zeZhAFCdN$=aR9sOn<%$9+mFR41w=e`t!NxkQsiWc%+0`qR8NL&-F(B)BlQs6W9_UReC(xg%bl}_!K*Ok z*mTH@_XJ3lc4oj$&MQ=l-*1R>?jPCDrk0?zVZ^-5n{NF~x6izFDY6O2O_vco;`>Wo zAMNw2^NY-`E%QyA3k>uv?SmA6x9?2!EL=_N-snxxKCMRzzH~M|{~-SSZ@Hm*bMe>) z>2lV0>JYd8%2oTTZL>XnkkDhGs2yiN?h0<%ey5gz7kX78sbujmWsG$?)TuQMX5}9q zD|2@4OGU`l-Gi~5x>rJk^z9l3nLC@NABmhat6%M<*Wyz87zN!m3V`gCEzvE^ZX zlKQUG*z@IR3YO?JVj?GO8Makta9HZ9oHU99*EQ~T%yB}bwd$76i&O{dDc#v$9(6j-}e%o0_It)t1X1 zvhRU>8CBkE`JA3WxAzUtAI=Zj3`Km%)Q&JcK{r%$vPzQ|w|(0|Vp!d~*BjPjSi(p# zKm4a9I^>K_X3T6nwn^GYx-^06DwPv=6tntDVW&JaRYBQt-0mEF`pxc<`nw9Fz3`N_ zhqF6hLaOU|PMqSoapxWrgf^|9fffPub{#(0rdo|_UxcuDX|}su=h&pZJnPR z4Th!K{|?!E4OL_)J62B?mYQFyg-J_yRYbOm;Yqspes(}4H-O$dR3wd{I~fA?w^cw- z#*_m0_<3{Lqc9OgT|?snV#HDL_)zzWR7tz{GxaMtpJfAjyyE`Ab939vX|a><8CFfG zGl)=fn;9{u4tDVvf;qR+?zZO5iRyu5~i zH#8t0y$#~*ub}et-%`s-Y7sH zbj&jb{@&Lhoi}LoEOD1>+Pi1gUP=y~={2~yz0?dXhp+X~-)*Mao5Q3T+{nkOoM>f7 
zTRUdg+d5~Oam5$AYaQFR1q6rRKvCtN>ijP%glo5*Q~%T3_vXVwf}}>C@=km4xe1l+ z39N6M-Sr&%4`1SRN}JqaPOOgSFDUPCvb=r*W;F^kI9}_7QH*aWw-~EB$dNf|^s}>E zoxnTs2l!)b_jBjIg=53fb&DG1#qM&RO+LlJoBqj_44u%@2yMoyU9Yw#F*QOLa3aFF zel9epFWh7-ek!V$FHGSIgav~HyZ4)6J6560zF}po>;A8`el1Ub z2k}vH@e;K#*Zz?=nZVreW*x?V#}Mp*)zquhAxzW%%stZ_)4WniB_h?|vYuST;Yc zJonh^ZA8Kvcimx+|VKaeqtXWV?&cO;i;^L*>3W*=j1FLA)}!&8P9K^rDEg z3i721^SX>&r8n2^C;kj8?~&C>HG<6=nd|KHPyNv3!dMv9jz&$6-!Jy@u&#$Bk#NJV z(h(dX0%;dhStJIkFjLTL+IvnzS(cc&r`hhawoBf4Y9c~y@a+(M>YIjEO03>*25`_y;Tic?unKbnPnpCwRIVGYOWA2nBufjUJTsc z6BecX@rH__U%j}r91fe@ZfDvN?dY{=Vb#%TZg8i4)1zxsBJV?-pP`u}{Aj&#$`_k?^? z%|9zj6C8P>QwPLQQm10OTdU&UvnpoB;5uqClEK4_CR%_ly6Y6Yop2}WE}X_ZPevu9 zAjq?<_wwb~T$@Aoc6p4Eq?Jap=q>^^>CCr|5ad;5@l;Jy#jNufAFzem{g zI)vB3SrGxV1H<3`P|9cPKQle5mV<=EQjRLZ*H3d^%+0hI(|lV zeUPt`Dn+*mok>Z(*w1MjA3I>Epz{?|#vf|#nB=@4@2D^nwn)9^&4J@OCXf4;HdPsk zif5LWnR%cmy%a9C{ZkPZyc6+};vR|ds|+8l{XMavMi%_h^F-sVBOar(a@ogM2(KU3 z?*>Zy=li)v8i54vN;xbGEWNVeBUt=3<$0Y@ya;qpy)Vt4JYoj39b%P%`PmQIzk$%+@9%&> zHkOlSz~Dg<@WBtkO4iW;Fy=3xpNIJd5#weXs&4i^i22&2qYY=jvxj*2v+bxt`|4?0 zW|ivW*G|LTVXhrT(SvvA^$3_3O@6^UH^4VRIWRaVOXWnbP+{W{jL+gXi(jK`Ki)oq zD`fQ&GJM3IY5_s(UX7$cK<;xCfyV<~Dyji{ba&07)L5)Ovk8D+d8itmb-V1w6`(Id zrWs*;n4=&X!^Qn`0b+z_{f*$V1F--Uv{7>$czT$95t0}w4@`*kspPh@<{G5jrf=$2 zl-z4N&`|8?4RxLHjjlemv6`SIM&Xn&1$xFiT<2yJV;?Usj(MfrydV6fIzMhdF>JmA zPx_Lw-(AAGbQ6RbSOnF5T5B-yRCRu&Ng^36@J^q-EaBj$6auh`yicPVY_d<*O_|=I z_)KNK3Dz>?$6#Lv>MQ9ML+?UokrR5!$Ao_7S&9!P8=7orj27mV2(#U*b4 zP;wBl#}V}d=_Nk|+Q|z~>gT>&c&etA@Ty)^n@6&$?T+$J*9f=hER#$!rW=1hTr}G~ z@1QriAVI(i&Rl;8z9W<;;f18gekSMu2~Lmy!%`f`_zB#7A*Dy;SQpVGB>ai3G!EoY zOcE4wV+~V*1Q{hXbY>tqxfXVqRc5%TzXNias9ssm5-m#{Aar^MqYWcOgSuyTKms%a znj-b)WhQ)!xArO|6Ha5pzpk9GJ}z^1cCwi=;rNQ!GFDSHu)pCpacuf66UU>Vm)NuJ>ILi9T&lV3wCvW1!pK6rrmXWcc;rxKEmnl zt!$H8pVs=TAM%gir3$qCO4}a{``C}71JnJiAJ{LX@1Ad=m0XMjH1}-n4gMNe5Fdcr zQvGLxztM7x>oQGm2z76H6Oo4+a>>sy1U7-YDx23sfjS~aEGkUg_<(B!=6CYLfrOsN ztMFP><34Ms|Jd}##zHqe#U`CseObw%L%5fR%~EpdXe^1+A&q_E3AKELIh 
zF{9{hRr-~Oe-HL{y6K+*KwIaB#t+$L6P1EI&3B0>ecnzj8`tlVAs4^5NdJrMQEkdlDM4<0=3y`F0^3*uD8=X_tr=UtPr|d(=(-+jranqYMFL)!+~` z>K@r3{Nv<9)d^4bo}^f@x)C68f{eKv{&>{zTCy(i(1IL{fBg~LZK0$_6a>C?IY1^p z|A{Ox!gCi1Z+)|C86?Bs5`DMfx4jh0i7N8p*sAS>QGhR+D5p;egCkbk-(Gq1T`zL& zrAII>^;475La!3EZoA%KW1gX9Eqv)e&VS1nHm;lEV9cYH1Q0KTU;UDq*47^MprX1j zY~hx~*b&ncg%?1ckL677l!62fz2ZFq>tP#O8*{Lp8DakvnTcQ>!3sZJ!b|1IM z;hB5@np{zeO2!iam~x+%EWB50f;OZg%#Lt#GNT9(3y@4gai2&d0;R0bg!10NJ1oGE>X^5 zG#zmE(jrc~@BZ$X$Qx~mE3gNM*cxEr3(>o82mq@vf(tfcd5|9#Cou)0Z=W?eA5-%+ zMfP)UT*9)zIkWJfe6i0*Z3Ov%wfU9ypsM;(W z;CZ~Nw2|)YB2W&}7ab;NYF-GK>!GdsU(ePckak8(L7tbV;rYC(Ul%B#G}j*c;*S8e znhnq?j>=m|CGYj*qnQ~wjJAZPbJC4^&lj7O;;^cJF8cv2&Z6E|P0R8Uft=6?1%

r1 zX3%iKd7r>TH+ioHc>3bOowVAL;j-lp##lE%K>(I}7@v%j0wMvzP{W*O5-Iyx<1x?V z8c@qP>(1yHdP3lxZc|dI1A(@aXBXS~rr*A$k?XWhvirZR$bK?a2mEECR9{Cqojf{% zvb^Sats|Gre0d$yWMyvO;Pw1MKfA9UFXvDFpmc4^{u6JADgs#K&b6r1ywmyPyZv3n zNBrwv-3%r2E6|v4R3;u+6);v0NL1tWkLj*NtG_-tF@1?`B|&`u=gB#-U!#&Q*&9Bv zwI+6c>~NF2M_C-Is7@1ZiJM#C9<@wJcF zvgOY`|kB=c^LMg+H=nSeg>n)&&}S;E7@gL@$=T6QW8i{ zL`F$C0>CG16Q;zc3>)ooOH4$F`qwALo{e4?W|43tjry=LV01S;C}^MYbm3;|_W8>@ z;XYLX#FXw$PTd6HBpGyb9i3WsR>u3F<@^a}u%}&wkflb1CL1+0@$JGC>v{^1OA;K& zBO^ZKm^UTF1c_4nfDrXr8r(578%+|e4ECi35&*VtxA{>Kdo4ZPI#jIms^Ukd+1+=k z_&-CoOUMQ^(EQ78HW80END?ZVt- z728ge+4td6p*N1L|F8D0JFcnh z+26oYLNl$sS6^YBnZ+Zgv6%^D3G;Kl&%X12oZ%)Ls%&h zkrqU12*nUu=t+POcn5af-+TYP_bYGr_da|7OYZ&Ld(O-`GiPSLGqZG$AQPi!fH94t zKk<#~G1(@fI@n}I?^+f!n0?sk`-RuxIGH>ZweZ%^4qbZMP-^C$-m|IZdB3p&_I%_v zpB{<4g^EXY7dsqfedaw_Vq?+KwjUTSw$B&_$;+q92l6(+XvXAiiTIGfG8SPTzVbs2 zeueaG_^H0t4@r&%hL7sny;Fe}cru$%&rll3v6`E-r#pEC+D@W)h+-^_L(KOc9}q@u z5|Q2o4ve^>SFgOHU*^4jRcxO-E_Wrl(>rg3LDX||OOroqcTD!Ip6oIG&F2e>Usdxe zE6&7(lP3pC7E0^j9@J1h!`BBaX^&@BVpWj372gl1nB6iKDLk8>@SxGDw6udZaypTY zB>&X3NY+(Ehk98N${%{3c8?n@O%2xat+s871aY>`xoWh_Y*YQcj^b(R@uSl*A}*Zz zFNw~)%iTBoVs!?IWt9OJWxPz3@i&)zyq+EmFjM!8YKw7$?W$h>;TzmlKPB^zmyNm_ zpd+5imnydWKB z_!ViNMY-P>>!gLoZ4XmRd>xZyn`-@6C(05kUPNwLSdcRA&l1-7#uDAsgJ-sQHm~xu zAxkkrTaHfLX}w9=FFuE$gM07T9*3nhBD7yl6pwYHMn8u|6!MTQT<`9UM*NGUmS5Bs zvak!IM`lvZb-N`%jA&T8JXAw#nH3g;V%9lTp6$14{ut6@-u@)V{%oavj69w6qJ!pa zB)3{LFG36#qzG$ANAE18?sHQPE>57_^&RPC6wgNxYub_lx#8W^HJ6^ z#FQq}CSI}6zV`Nh1$g5;ydSsCdInshSgXk~fv>W~F2+2YzrD^dPvB~Ze| z3ik(4Nz5$6*NaatHUTD_8sc-&TA=LVgA%rccV5n!41UUD|ADjF=ameD-3*!GvjtoV zx2`7spn^cNVlrN&Z^p6H$cz&;Y!e}47+1j4aKQ; z+M=-OG}$j={mPo5CaPHGm7{FFRZ;eVLYIlEi*ieMjAs_OM-^ggoA@yjjhkw3;}s&- z!n|Fk!d^>e`+w}|@P6>_V!GOif%{;%+f6q5GzQwYjpy%!KD=cdS*(Ao?cz(9mu}-*&&2TKfrpJlb`+C8&Bl2^(slP{O;NfJWsfJFud_z^kiS z4o-TaxQ@75Su8AVgGn~@I?mEuz1@4jqu6i!mif5bDv>8sMM@DU6LMc2^g6o4!kcVb zFnuQUHEtWTIyO@R)}U!|M$9zkD;B`Nje@#AkXN0#oauhG5~-qeTT6_&=rpTF7HpXx5WU1g(Bk0O2NgRGrHrP%ha>{RzYy=`{%w}(Dt 
z_$Dp09?R2qD$A4xC?D=4X)7s-x$e#Cc5;u?g4^a;$R$OL!jmU`yrsVZQdU}O(kT?R zT$$P}B})!*#iO4fnN#|T%_}2swfV!N?{^TyZqJo=t{&heirr?WJhcgQ7*uvVv6 zq{}^D4fr?d>r1U|&S89Gac&~((ED>nS^BF}lmaX3pk$lfJE|@vt*v97R32+gk)}U==~Z+6o~ZcM7JT1886Gydvs85XrhJi_GSli5JNjg((Yi zz`0FG`^tiW8ddXEbMKEKypPvcQ`NuZWS(*BYKKh|y*qb?ZC1;;)DuZZNCJn%o#z9{ zYM*h%-fv4K$xg}hH6keyrS8KnW1G(rXq}*QP;A6FNNSYUYY7q!Snp=AWW0U^IN7XYq5o@KC8eizC86UWEu1o~AM~7M5+X z23HkFy;@Epc-Ua9{T!>Kri+Y_HN~!Ym=+h!3F^77iR6d~YOQs}vIsoT5B<{>aB@qi z@PCujcoTQsK^~H-+o%o9xLl3R4RKl;we|GU{Y9&v#yn@r z)2K7BG>6Ob=daGl6sfa;F&tX4g?j%OaJW61oy*p^FxcviIoQN!mx>2Q91OtF^E>&! z(9V`!yuTja6d@B$xcIsii770Lp0zTby{DOYLtFBhUt_V;xi%FjpACElPDMj}z2HyV zgd20)jL6s4Z0RP61e1!#!>Orw)5^t%(%p#d=K?J+@LyTy)NCgE8H0>6WDw~szOn8h z+;HngRh~y(+?u@1!c9`v_`nSk-Us=`k|TiF0rXEkbZ1{9I}Iy7{#u~KI=klaYRBW5 z4~mu4Ve!@|q!U>awGbw&6yH$WrWk@4PDvT>t@D-CS*0YtIC6^-6gs*M?vFyBu`+Ps z>39=7wW6N(1!c_Iv+3E{dYNl$!$0nxdJ`ROpcT5}a-nC<^JPwUD>K!bh_dlHQV>Ab_~OqlzRKMPLFi^pM9Hf&T5O&2xtQ5IG*hReoekboaEI?BK9f zuN(*tTdv~Ud?suWyi$9fRep3d)_v&uY6{lVplKO@K%SV0fDZN%+8$uXw@ z_|3COLG8?QM&x-}cA^|d;QXi6T|?>f*!XA!pdDXRlR$+W)4p@J!6UQV~= z3iGk#Rcxlnm_Ft6_LibY0{yI2XY?1<3H8qbgP=u4Gs5AuouE)A`Buf4I|H<8TI@i@Jfua3SH&xoZ8xnPJgJc~Hlxej1zdYo*$Rpvb6m;+fEpUf>0pgGuobF1(9DOe(Nag6WX1YH(EIQm+rYkz3zbgZa z0)xH6gfY}DUfF_|@TI6YzBH3jugHf@tc!FAp4&d6}au5QoZhK0C|9VL~EgSpez4rarS@EBCmA}gm%3Z zSIkGER~91-`E6rF7wzZgb5Lkb*Ov~R=hmJqcHYSzt_|H)Rhd1?4J8C%K}qKts7l8c|w0 zM%<>V)W@Ifc3YijrV@%{`K=p@>5k}+%JQZ+Z%UG9WZRL8A!{}~(es>pDE#qfT^%W( zX=neQk&&F-`prz2D^`MJ_st&|Hy}~6tA^}**+E0wyeCFD_hqkz zYcfiGP4VHMd>i=c$(ng2FNvrLg~&yzVKgn;{8CxMviwp%{nY*%tfQtwK3&x<#m)iX_qeT8m&l;T#0 zy@Q#VV~WE{Do#2w9mVKJMedd7STaIto@tXPjLkCCYf|*SmHc~T? zcA=dAv%zKwQ~N-Nxt`G)_FcltScI(iD-?RBHli^xw}&(7piu6>YV@{rWOltR!jSl! 
zZhq_gi~Ls8-7)gHDBdjGirc;0RLSvHMj2~!$1Jo!Eb=f|u#4o!H z8Fa+u$MiBJgFKq|^Dl$n9&3N5qC(Y-SM0Ax+zWqvhi1cd_;xX5Hh?DXQ|x--Wphi`4zpp9BISO z5FC2+7nSNABbkLmfmAa+6|-x0ms)$w^1ReX7e=nqVf|`Nm!Yz~ESxE{$*&uS;VKJh zu@0AcpNEpH7p=YFR~#0cq=L)%xzY4a{ut9_lec0_D@LF{pltag;~6n*XcTo)y34J5 zNECL%#=Q6Q@<%l({6QW6gY&((WMgiu4RDE1x&qJM8hmG@2$uU!Q22~6z~u?V2guTN zzFpjqMe&?RROqgfo49^FTiMclvLGp)Wh~Ht_;lHKZY#BZZ88#5=VA7ja!$B8UVJ3< zq9f)o#=O}8A1Bl(d%$V(<0dZfNP@tulyqSsgv07!i_FaY&qkKyNM zbp4zF#Xv*`Dhz|9!O`7VbVQ1jayjo9O%$guc!cCAe1u`Gs}({t%*3Sx)6Q$8I8ONM z*c__B9I4|(v&&?kzPS>Jw9X%<^YaSEgjn4ut7BVm23GR2n3G|;T-FmY*fG7~; z*X;3SR(D6$-VM0;%_oGWiRl2pvwhLWm>h6f2Fl6{#QZlDaJzM&_mg4sH3i8 z9{XQdHK+4pKskuCTzesCq{vYMg`1F_X2t_R#g^E`CJw7z%nWt|A<)lo_V=6eaI%G@ zR5k(udO|DyoKWvoHC&=9)O63*{o{KHhAZ$lo*Tb-q;ktfICm;=Ps#1_M(SkqSGuq9 zmMVUE>EKjpUP#$zUB!IF7LQbSOVQWdz=fu>aZ{o7Ga=kp4E47hh%HVj;iV&g1OEs zff7=FrFMp2{&q%J5mwP8Am#)LhpHUi-VA1%E7%{GBDkE!gxV-n;W$jNI4$t<2HXWm z(5TveKw#uh;1H2?_oG-FS)8oNy0-@yT<0ig)F58tBOAvFqdt;uUK)+W>a9tEwLim@^)vAPFa&L_n z36xk-E<#sQ-|oaY9@w#~Wh1HbH5FGla7x`IK+ATRK}k}`x=`86ADviYERw6s@40#m zoq5tU@6Gl;al&UPmf2Vt9Mxt(2-VT7Oqw*o0WZ~cc%k?crg_*?p~Y5pEZj6-Ci2~t zn>uG4Lf_2cZ{adC^mQiNZjMwQ*aMsXfIQ^enAy<`7sln5yc2?qu;X>?6x-(ew8w27 zYNz==w-4Da58kFK(z2&=S{?KI`%&e#1ZL(@ao=01u&%RnN5@rETV6il@gHoz$i2K7 z2TL;rO#b!y2XTx4g!7cMLs!`$CGucjVq|1Qef=0QTDH7Hbr-L$PAQ*US%wTb>Qa4g zjCed9jk#7uB+z$0F1E-gId2pjYM8=!tzMj1xlNO&+1PB?3Bc|t0G8Ktzno8OR{r_k z!sWhj;wCnG=FLn)ev;Lu#`!=ek+VJxvp)Mm8-j8$di;#G8ye8T*&nV_nZblo;Sjh~r$tvOv)MJdxZBo!a!N3go%)seq`=1{;C29(ym5b*AxwL54~8vUt1C!-KL22=gCv_t|H``17#e`St^qHS(UTglHwSc{$cv# zbd?+&Xc>?TBhvUO`JA>t^ZXcND!-Rj+O!F0G1a?0v9C0{gO=9F;n!3;2Ew{Qpgn@T z+|bQArXa|rR^FY8gomUPk->;kOh48o)58yV%n>YWUp=DD{TK28^XwF@Vts~i8?^*KV3be@l)&Xr-o_#(s&w4b} z5K0sOE%K;rCKm7*P-A5GZzMT+T4*1H0lgyD8D{qAKWG&VW6-<~Hv3l&ngPuVBy7vq z-QFSw-h?^v^?8s;Sq06;i>*KW2rwQ8Q~tI69Ig<^hk4!KzpE!y2ub4%lzzLb4Ez$p zfBR_zEaNmh5V&hE+O_skSQ>ue^jh6o2r{Eu!axO%zdilmUi}B7uKx?q?~^70`u%Hc z&3=n;^bjyU(+mNIxMtABzh`l-2(+aM%sXwim5^95d+OaIyK{s&O>eV`=|@ZKLaVWH 
z7s%-=`IOs0d^|B>#0zi_296)=lz{#&VEBI-;-5rj|4aAdp0&MXlzJqjq(dK24Y%?p zhVp^iXUFU7n3+7u()2D>O>d78EL&Uo>(?I(+yD3A&U$hI?kb6)$yF{?m<(PkdI!B3 zetlZQqLh0c*)x#GqQ{K8R9*YMZQzxEkBR+BM)|yRTpTibJ4xc+u;>}+Rsqz-eZ0YX zL6)!FyC`^ySDN9_A)|!7^DmYMQr(1`$3S>H_q$Bg>a&< zuine!zv|6D(PI%qW9!-)s-kxr7}{Dv&XVwduNnLy)cu`o(A3o)8#t6x@u%bLA3~SE zKDxf=|Bk)S&lTPZL87B(Cyt%}^NNeV`1|+r_YdXiU`a3pEf*qlrPhaEb$#}0W7%DQ zK9%(Ubd4R!SAdqADyLkdv?sZ|!SB~OF@3bMxslx5IzSVxntnc<1a9VcjDNiuGMqFN zVn9fNtxmo!UxDDGGeJ>3tizUBZAaw9bD6*L@?0ku^HG<=@Rd`_. + + +Introduction +============ + +`Dask `_ is a powerful tool for speeding up data handling +via lazy loading and parallel processing. To get the full benefit of using +Dask, it is important to configure it correctly and supply it with +appropriately structured data. For example, we may need to "chunk" data arrays +into smaller pieces to process, read and write it; getting the "chunking" right +can make a significant different to performance! + + +.. _numpy_threads: + +NumPy Threads +============= + +In certain scenarios NumPy will attempt to perform threading using an +external library - typically OMP, MKL or openBLAS - making use of **every** +CPU available. This interacts badly with Dask: + +* Dask may create multiple instances of NumPy, each generating enough + threads to use **all** the available CPUs. The resulting sharing of CPUs + between threads greatly reduces performance. The more cores there are, the + more pronounced this problem is. +* NumPy will generate enough threads to use all available CPUs even + if Dask is deliberately configured to only use a subset of CPUs. The + resulting sharing of CPUs between threads greatly reduces performance. +* `Dask is already designed to parallelise with NumPy arrays `_, so adding NumPy's 'competing' layer of + parallelisation could cause unpredictable performance. + +Therefore it is best to prevent NumPy performing its own parallelisation, `a +suggestion made in Dask's own documentation `_. 
+The following commands will ensure this in all scenarios: + +in Python... + +:: + + # Must be run before importing NumPy. + import os + os.environ["OMP_NUM_THREADS"] = "1" + os.environ["OPENBLAS_NUM_THREADS"] = "1" + os.environ["MKL_NUM_THREADS"] = "1" + os.environ["VECLIB_MAXIMUM_THREADS"] = "1" + os.environ["NUMEXPR_NUM_THREADS"] = "1" + +or in Linux command line... + +:: + + export OMP_NUM_THREADS=1 + export OPENBLAS_NUM_THREADS=1 + export MKL_NUM_THREADS=1 + export VECLIB_MAXIMUM_THREADS=1 + export NUMEXPR_NUM_THREADS=1 + + +.. _multi-pro_systems: + +Dask on Computing Clusters +========================== + +Dask is well suited for use on computing clusters, but there are some important factors you must be +aware of. In particular, you will always need to explicitly control parallel +operation, both in Dask and likewise in NumPy. + + +.. _multi-pro_slurm: + +CPU Allocation +-------------- + +When running on a computing cluster, unless configured otherwise, Dask will attempt to create +one parallel 'worker' task for each CPU. However, when using a job scheduler such as Slurm, only *some* of +these CPUs are actually accessible -- often, and by default, only one. This leads to a serious +over-commitment unless it is controlled. + +So, **whenever Iris is used on a computing cluster, you must always control the number +of dask workers to a sensible value**, matching the slurm allocation. You do +this with:: + + dask.config.set(num_workers=N) + +For an example, see :doc:`dask_bags_and_greed`. + +Alternatively, when there is only one CPU allocated, it may actually be more +efficient to use a "synchronous" scheduler instead, with:: + + dask.config.set(scheduler='synchronous') + +See the Dask documentation on `Single thread synchronous scheduler +`_. + + +.. _multi-pro_numpy: + +NumPy Threading +--------------- + +NumPy also interrogates the visible number of CPUs to multi-thread its operations. 
+The large number of CPUs available in a computing cluster will thus cause confusion if NumPy +attempts its own parallelisation, so this must be prevented. Refer back to +:ref:`numpy_threads` for more detail. + + +Distributed +----------- + +Even though allocations on a computing cluster are generally restricted to a single node, there +are still good reasons for using 'dask.distributed' in many cases. See `Single Machine: dask.distributed +`_ in the Dask documentation. + + +Chunking +======== + +Dask breaks down large data arrays into chunks, allowing efficient +parallelisation by processing several smaller chunks simultaneously. For more +information, see the documentation on +`Dask Array `_. + +Iris provides a basic chunking shape to Dask, attempting to set the shape for +best performance. The chunking that is used can depend on the file format that +is being loaded. See below for how chunking is performed for: + + * :ref:`chunking_netcdf` + * :ref:`chunking_pp_ff` + +It can in some cases be beneficial to re-chunk the arrays in Iris cubes. +For information on how to do this, see :ref:`dask_rechunking`. + + +.. _chunking_netcdf: + +NetCDF Files +------------ + +NetCDF files can include their own chunking specification. This is either +specified when creating the file, or is automatically assigned if one or +more of the dimensions is `unlimited `_. +Importantly, netCDF chunk shapes are **not optimised for Dask +performance**. + +Chunking can be set independently for any variable in a netCDF file. +When a netCDF variable uses an unlimited dimension, it is automatically +chunked: the chunking is the shape of the whole variable, but with '1' instead +of the length in any unlimited dimensions. + +When chunking is specified for netCDF data, Iris will set the dask chunking +to an integer multiple or fraction of that shape, such that the data size is +near to but not exceeding the dask array chunk size. + + +.. 
_chunking_pp_ff: + +PP and Fieldsfiles +------------------ + +PP and Fieldsfiles contain multiple 2D fields of data. When loading PP or +Fieldsfiles into Iris cubes, the chunking will automatically be set to a chunk +per field. + +For example, if a PP file contains 2D lat-lon fields for each of the +85 model level numbers, it will load in a cube that looks as follows:: + + (model_level_number: 85; latitude: 144; longitude: 192) + +The data in this cube will be partitioned with chunks of shape +:code:`(1, 144, 192)`. + +If the file(s) being loaded contain multiple fields, this can lead to an +excessive amount of chunks which will result in poor performance. + +When the default chunking is not appropriate, it is possible to rechunk. +:doc:`dask_pp_to_netcdf` provides a detailed demonstration of how Dask can optimise +that process. + + +Examples +======== + +We have written some examples of use cases for using Dask, that come with advice and +explanations for why and how the tasks are performed the way they are. + +If you feel you have an example of a Dask best practice that you think may be helpful to others, +please share them with us by raising a new `discussion on the Iris repository `_. + + * :doc:`dask_pp_to_netcdf` + * :doc:`dask_parallel_loop` + * :doc:`dask_bags_and_greed` + +.. toctree:: + :hidden: + :maxdepth: 1 + + dask_pp_to_netcdf + dask_parallel_loop + dask_bags_and_greed diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index fdd0c4d03e..771aa450a3 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -45,4 +45,5 @@ they may serve as a useful reference for future exploration. 
../further_topics/metadata ../further_topics/lenient_metadata ../further_topics/lenient_maths + ../further_topics/dask_best_practices/index ../further_topics/ugrid/index diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 0e2896b7a1..9b62715be6 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -71,6 +71,10 @@ This document explains the changes made to Iris for this release to use it. By default the theme will be based on the users system settings, defaulting to ``light`` if no system setting is found. (:pull:`5299`) +#. `@HGWright`_ added a :doc:`/further_topics/dask_best_practices/index` + section into the user guide, containing advice and use cases to help users + get the best out of Dask with Iris. + 💼 Internal =========== From 43ce78a13ffb73feed990616c1953a30b5b824c8 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Thu, 15 Jun 2023 11:54:43 +0100 Subject: [PATCH 003/134] docs linkcheck ignore biggus rtd url (#5351) --- docs/src/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/conf.py b/docs/src/conf.py index b7f87d4ebc..a204263a24 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -383,6 +383,7 @@ def _dotv(version): "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", "http://www.nationalarchives.gov.uk/doc/open-government-licence", "https://www.metoffice.gov.uk/", + "https://biggus.readthedocs.io/", ] # list of sources to exclude from the build. From 13b2a7466c71121bf5d3261f3ddf9803b46ca694 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Jun 2023 12:22:55 +0100 Subject: [PATCH 004/134] Bump scitools/workflows from 2023.05.0 to 2023.06.0 (#5350) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2023.05.0 to 2023.06.0. 
- [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2023.05.0...2023.06.0) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index dd017fd84b..c42eb90104 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.05.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.06.0 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 4068d68ed8..453014fa2a 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.05.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.06.0 secrets: inherit From eaa7ee40eae350dff82528555e350ea6d964879e Mon Sep 17 00:00:00 2001 From: stephenworsley <49274989+stephenworsley@users.noreply.github.com> Date: Thu, 15 Jun 2023 14:30:48 +0100 Subject: [PATCH 005/134] Fix #5347 (#5349) * fix unit multiplication bug * docs linkcheck ignore biggus rtd url (#5351) * add patch whatsnew --------- Co-authored-by: Bill Little --- docs/src/conf.py | 1 + docs/src/whatsnew/3.6.rst | 20 +++++++++++++++++++- lib/iris/cube.py | 3 ++- lib/iris/tests/unit/cube/test_Cube.py | 10 ++++++++++ 4 files changed, 32 insertions(+), 2 deletions(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index 
312925b4ea..94106b952f 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -382,6 +382,7 @@ def _dotv(version): "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", "http://www.nationalarchives.gov.uk/doc/open-government-licence", "https://www.metoffice.gov.uk/", + "https://biggus.readthedocs.io/", ] # list of sources to exclude from the build. diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst index 151c63ef51..543ce7372e 100644 --- a/docs/src/whatsnew/3.6.rst +++ b/docs/src/whatsnew/3.6.rst @@ -55,6 +55,23 @@ This document explains the changes made to Iris for this release or feature requests for improving Iris. Enjoy! +|iris_version| |build_date| +=========================== + +.. dropdown:: |iris_version| Patches + :color: primary + :icon: alert + :animate: fade-in + + The patches in this release of Iris include: + + 🐛 **Bugs Fixed** + + #. `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.convert_units` to allow unit + conversion of lazy data when using a `Distributed`_ scheduler. + (:issue:`5347`, :pull:`5349`) + + 📢 Announcements ================ @@ -180,4 +197,5 @@ This document explains the changes made to Iris for this release .. _PEP-0621: https://peps.python.org/pep-0621/ .. _pypa/build: https://pypa-build.readthedocs.io/en/stable/ .. _NEP29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _Contributor Covenant: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ \ No newline at end of file +.. _Contributor Covenant: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ +.. 
_Distributed: https://distributed.dask.org/en/stable/ diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 7c6fd55c10..44a40cf72b 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -19,6 +19,7 @@ from xml.dom.minidom import Document import zlib +from cf_units import Unit import dask.array as da import numpy as np import numpy.ma as ma @@ -1144,7 +1145,7 @@ def convert_units(self, unit): ) if self.has_lazy_data(): # Make fixed copies of old + new units for a delayed conversion. - old_unit = self.units + old_unit = Unit(self.units) new_unit = unit pointwise_convert = partial(old_unit.convert, other=new_unit) diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index aa9e3b51b1..28fbe429c1 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -14,6 +14,8 @@ from unittest import mock from cf_units import Unit +import dask.array as da +from distributed import Client import numpy as np import numpy.ma as ma import pytest @@ -3012,6 +3014,14 @@ def test_preserves_lazy(self): self.assertTrue(cube.has_lazy_data()) self.assertArrayAllClose(cube.data, real_data_ft) + def test_unit_multiply(self): + _client = Client() + cube = iris.cube.Cube(da.arange(1), units="m") + cube.units *= "s-1" + cube.convert_units("m s-1") + cube.data + _client.close() + class Test__eq__data(tests.IrisTest): """Partial cube equality testing, for data type only.""" From 438f292e14119649b1fdab39b436efb9f18a19c0 Mon Sep 17 00:00:00 2001 From: sloosvel <45196700+sloosvel@users.noreply.github.com> Date: Thu, 15 Jun 2023 18:34:14 +0200 Subject: [PATCH 006/134] Avoid realising auxiliary coordinates in `concatenate` (#5142) * Use core_points instead of points * Remove unused import * Use core_bounds * Correct test * Remove unused import * Correct test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> --- lib/iris/_concatenate.py | 75 +++++++++++++------ lib/iris/tests/test_concatenate.py | 25 +++++++ .../unit/concatenate/test_concatenate.py | 25 ++++++- 3 files changed, 102 insertions(+), 23 deletions(-) diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 01a1bb689b..10a31eafc1 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -101,19 +101,23 @@ def __new__(mcs, coord, dims): """ defn = coord.metadata - points_dtype = coord.points.dtype - bounds_dtype = coord.bounds.dtype if coord.bounds is not None else None + points_dtype = coord.core_points().dtype + bounds_dtype = ( + coord.core_bounds().dtype + if coord.core_bounds() is not None + else None + ) kwargs = {} # Add scalar flag metadata. - kwargs["scalar"] = coord.points.size == 1 + kwargs["scalar"] = coord.core_points().size == 1 # Add circular flag metadata for dimensional coordinates. if hasattr(coord, "circular"): kwargs["circular"] = coord.circular if isinstance(coord, iris.coords.DimCoord): # Mix the monotonic ordering into the metadata. - if coord.points[0] == coord.points[-1]: + if coord.core_points()[0] == coord.core_points()[-1]: order = _CONSTANT - elif coord.points[-1] > coord.points[0]: + elif coord.core_points()[-1] > coord.core_points()[0]: order = _INCREASING else: order = _DECREASING @@ -700,18 +704,27 @@ def _cmp(coord, other): """ # A candidate axis must have non-identical coordinate points. - candidate_axis = not array_equal(coord.points, other.points) + candidate_axis = not array_equal( + coord.core_points(), other.core_points() + ) if candidate_axis: # Ensure both have equal availability of bounds. 
- result = (coord.bounds is None) == (other.bounds is None) + result = (coord.core_bounds() is None) == ( + other.core_bounds() is None + ) else: - if coord.bounds is not None and other.bounds is not None: + if ( + coord.core_bounds() is not None + and other.core_bounds() is not None + ): # Ensure equality of bounds. - result = array_equal(coord.bounds, other.bounds) + result = array_equal(coord.core_bounds(), other.core_bounds()) else: # Ensure both have equal availability of bounds. - result = coord.bounds is None and other.bounds is None + result = ( + coord.core_bounds() is None and other.core_bounds() is None + ) return result, candidate_axis @@ -762,21 +775,37 @@ def _calculate_extents(self): self.dim_extents = [] for coord, order in zip(self.dim_coords, self.dim_order): if order == _CONSTANT or order == _INCREASING: - points = _Extent(coord.points[0], coord.points[-1]) - if coord.bounds is not None: + points = _Extent( + coord.core_points()[0], coord.core_points()[-1] + ) + if coord.core_bounds() is not None: bounds = ( - _Extent(coord.bounds[0, 0], coord.bounds[-1, 0]), - _Extent(coord.bounds[0, 1], coord.bounds[-1, 1]), + _Extent( + coord.core_bounds()[0, 0], + coord.core_bounds()[-1, 0], + ), + _Extent( + coord.core_bounds()[0, 1], + coord.core_bounds()[-1, 1], + ), ) else: bounds = None else: # The order must be decreasing ... - points = _Extent(coord.points[-1], coord.points[0]) - if coord.bounds is not None: + points = _Extent( + coord.core_points()[-1], coord.core_points()[0] + ) + if coord.core_bounds() is not None: bounds = ( - _Extent(coord.bounds[-1, 0], coord.bounds[0, 0]), - _Extent(coord.bounds[-1, 1], coord.bounds[0, 1]), + _Extent( + coord.core_bounds()[-1, 0], + coord.core_bounds()[0, 0], + ), + _Extent( + coord.core_bounds()[-1, 1], + coord.core_bounds()[0, 1], + ), ) else: bounds = None @@ -1095,7 +1124,7 @@ def _build_aux_coordinates(self): # Concatenate the points together. 
dim = dims.index(self.axis) points = [ - skton.signature.aux_coords_and_dims[i].coord.points + skton.signature.aux_coords_and_dims[i].coord.core_points() for skton in skeletons ] points = np.concatenate(tuple(points), axis=dim) @@ -1104,7 +1133,9 @@ def _build_aux_coordinates(self): bnds = None if coord.has_bounds(): bnds = [ - skton.signature.aux_coords_and_dims[i].coord.bounds + skton.signature.aux_coords_and_dims[ + i + ].coord.core_bounds() for skton in skeletons ] bnds = np.concatenate(tuple(bnds), axis=dim) @@ -1307,7 +1338,7 @@ def _build_dim_coordinates(self): # Concatenate the points together for the nominated dimension. points = [ - skeleton.signature.dim_coords[dim_ind].points + skeleton.signature.dim_coords[dim_ind].core_points() for skeleton in skeletons ] points = np.concatenate(tuple(points)) @@ -1316,7 +1347,7 @@ def _build_dim_coordinates(self): bounds = None if self._cube_signature.dim_coords[dim_ind].has_bounds(): bounds = [ - skeleton.signature.dim_coords[dim_ind].bounds + skeleton.signature.dim_coords[dim_ind].core_bounds() for skeleton in skeletons ] bounds = np.concatenate(tuple(bounds)) diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index e4c22f49b0..7cb11189d6 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -12,6 +12,7 @@ # before importing anything else. 
import iris.tests as tests # isort:skip +import dask.array as da import numpy as np import numpy.ma as ma @@ -800,6 +801,30 @@ def test_concat_2y2d_derived_x_y_xy(self): self.assertEqual(result[0].shape, (6, 2)) self.assertEqual(result[0], com) + def test_concat_lazy_aux_coords(self): + cubes = [] + y = (0, 2) + cube = _make_cube((2, 4), y, 2, aux="xy") + cubes.append(cube) + cubes.append(_make_cube((0, 2), y, 1, aux="xy")) + for cube in cubes: + cube.data = cube.lazy_data() + cube.coord("xy-aux").points = cube.coord("xy-aux").lazy_points() + bounds = da.arange( + 4 * cube.coord("xy-aux").core_points().size + ).reshape(cube.shape + (4,)) + cube.coord("xy-aux").bounds = bounds + result = concatenate(cubes) + + self.assertTrue(cubes[0].coord("xy-aux").has_lazy_points()) + self.assertTrue(cubes[0].coord("xy-aux").has_lazy_bounds()) + + self.assertTrue(cubes[1].coord("xy-aux").has_lazy_points()) + self.assertTrue(cubes[1].coord("xy-aux").has_lazy_bounds()) + + self.assertTrue(result[0].coord("xy-aux").has_lazy_points()) + self.assertTrue(result[0].coord("xy-aux").has_lazy_bounds()) + class TestMulti2D(tests.IrisTest): def test_concat_4x2d_aux_xy(self): diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index a4243dfbbc..bb3770cf0f 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -355,7 +355,7 @@ def test_desc_bounds_all_singleton(self): class TestConcatenate__dask(tests.IrisTest): - def build_lazy_cube(self, points, bounds=None, nx=4): + def build_lazy_cube(self, points, bounds=None, nx=4, aux_coords=False): data = np.arange(len(points) * nx).reshape(len(points), nx) data = as_lazy_data(data) cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") @@ -363,6 +363,15 @@ def build_lazy_cube(self, points, bounds=None, nx=4): lon = iris.coords.DimCoord(np.arange(nx), "longitude") cube.add_dim_coord(lat, 0) 
cube.add_dim_coord(lon, 1) + if aux_coords: + bounds = np.arange(len(points) * nx * 4).reshape( + len(points), nx, 4 + ) + bounds = as_lazy_data(bounds) + aux_coord = iris.coords.AuxCoord( + data, var_name="aux_coord", bounds=bounds + ) + cube.add_aux_coord(aux_coord, (0, 1)) return cube def test_lazy_concatenate(self): @@ -372,6 +381,20 @@ def test_lazy_concatenate(self): self.assertTrue(cube.has_lazy_data()) self.assertFalse(ma.isMaskedArray(cube.data)) + def test_lazy_concatenate_aux_coords(self): + c1 = self.build_lazy_cube([1, 2], aux_coords=True) + c2 = self.build_lazy_cube([3, 4, 5], aux_coords=True) + (result,) = concatenate([c1, c2]) + + self.assertTrue(c1.coord("aux_coord").has_lazy_points()) + self.assertTrue(c1.coord("aux_coord").has_lazy_bounds()) + + self.assertTrue(c2.coord("aux_coord").has_lazy_points()) + self.assertTrue(c2.coord("aux_coord").has_lazy_bounds()) + + self.assertTrue(result.coord("aux_coord").has_lazy_points()) + self.assertTrue(result.coord("aux_coord").has_lazy_bounds()) + def test_lazy_concatenate_masked_array_mixed_deferred(self): c1 = self.build_lazy_cube([1, 2]) c2 = self.build_lazy_cube([3, 4, 5]) From 99fa8cebda439a1c900627206e7977d536b5358e Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Fri, 16 Jun 2023 11:18:56 +0200 Subject: [PATCH 007/134] Fix concatenation of cubes with aux factories (#5340) * Fix concatenation of cubes with aux factories * Added What's new entry * Added comment why all aux factories need to be updated during concatenation --- docs/src/whatsnew/3.6.rst | 5 + lib/iris/_concatenate.py | 95 ++++++++++++------- .../concatenate/test_concatenate.py | 11 +++ 3 files changed, 79 insertions(+), 32 deletions(-) diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst index 543ce7372e..61bf75f15c 100644 --- a/docs/src/whatsnew/3.6.rst +++ b/docs/src/whatsnew/3.6.rst @@ -71,6 +71,11 @@ This document explains the changes made to Iris for this release 
conversion of lazy data when using a `Distributed`_ scheduler. (:issue:`5347`, :pull:`5349`) + #. `@schlunma`_ fixed a bug in the concatenation of cubes with aux factories + which could lead to a `KeyError` due to dependencies that have not been + properly updated. + (:issue:`5339`, :pull:`5340`) + 📢 Announcements ================ diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 10a31eafc1..d98e63da5a 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -880,9 +880,13 @@ def concatenate(self): # Concatenate the new dimension coordinate. dim_coords_and_dims = self._build_dim_coordinates() - # Concatenate the new auxiliary coordinates. + # Concatenate the new auxiliary coordinates (does NOT include + # scalar coordinates!). aux_coords_and_dims = self._build_aux_coordinates() + # Concatenate the new scalar coordinates. + scalar_coords = self._build_scalar_coordinates() + # Concatenate the new cell measures cell_measures_and_dims = self._build_cell_measures() @@ -891,18 +895,21 @@ def concatenate(self): # Concatenate the new aux factories aux_factories = self._build_aux_factories( - dim_coords_and_dims, aux_coords_and_dims + dim_coords_and_dims, aux_coords_and_dims, scalar_coords ) # Concatenate the new data payload. data = self._build_data() # Build the new cube. + all_aux_coords_and_dims = aux_coords_and_dims + [ + (scalar_coord, ()) for scalar_coord in scalar_coords + ] kwargs = cube_signature.defn._asdict() cube = iris.cube.Cube( data, dim_coords_and_dims=dim_coords_and_dims, - aux_coords_and_dims=aux_coords_and_dims, + aux_coords_and_dims=all_aux_coords_and_dims, cell_measures_and_dims=cell_measures_and_dims, ancillary_variables_and_dims=ancillary_variables_and_dims, aux_factories=aux_factories, @@ -1163,12 +1170,22 @@ def _build_aux_coordinates(self): aux_coords_and_dims.append((coord.copy(), dims)) - # Generate all the scalar coordinates for the new concatenated cube. 
- for coord in cube_signature.scalar_coords: - aux_coords_and_dims.append((coord.copy(), ())) - return aux_coords_and_dims + def _build_scalar_coordinates(self): + """ + Generate the scalar coordinates for the new concatenated cube. + + Returns: + A list of scalar coordinates. + + """ + scalar_coords = [] + for coord in self._cube_signature.scalar_coords: + scalar_coords.append(coord.copy()) + + return scalar_coords + def _build_cell_measures(self): """ Generate the cell measures with associated dimension(s) @@ -1247,7 +1264,9 @@ def _build_ancillary_variables(self): return ancillary_variables_and_dims - def _build_aux_factories(self, dim_coords_and_dims, aux_coords_and_dims): + def _build_aux_factories( + self, dim_coords_and_dims, aux_coords_and_dims, scalar_coords + ): """ Generate the aux factories for the new concatenated cube. @@ -1261,6 +1280,9 @@ def _build_aux_factories(self, dim_coords_and_dims, aux_coords_and_dims): A list of auxiliary coordinates and dimension(s) tuple pairs from the concatenated cube. + * scalar_coords: + A list of scalar coordinates from the concatenated cube. + Returns: A list of :class:`iris.aux_factory.AuxCoordFactory`. @@ -1271,35 +1293,44 @@ def _build_aux_factories(self, dim_coords_and_dims, aux_coords_and_dims): old_aux_coords = [a[0] for a in cube_signature.aux_coords_and_dims] new_dim_coords = [d[0] for d in dim_coords_and_dims] new_aux_coords = [a[0] for a in aux_coords_and_dims] - scalar_coords = cube_signature.scalar_coords + old_scalar_coords = cube_signature.scalar_coords + new_scalar_coords = scalar_coords aux_factories = [] # Generate all the factories for the new concatenated cube. - for i, (coord, dims, factory) in enumerate( - cube_signature.derived_coords_and_dims - ): - # Check whether the derived coordinate of the factory spans the - # nominated dimension of concatenation. - if self.axis in dims: - # Update the dependencies of the factory with coordinates of - # the concatenated cube. 
We need to check all coordinate types - # here (dim coords, aux coords, and scalar coords). - new_dependencies = {} - for old_dependency in factory.dependencies.values(): - if old_dependency in old_dim_coords: - dep_idx = old_dim_coords.index(old_dependency) - new_dependency = new_dim_coords[dep_idx] - elif old_dependency in old_aux_coords: - dep_idx = old_aux_coords.index(old_dependency) - new_dependency = new_aux_coords[dep_idx] - else: - dep_idx = scalar_coords.index(old_dependency) - new_dependency = scalar_coords[dep_idx] - new_dependencies[id(old_dependency)] = new_dependency + for _, _, factory in cube_signature.derived_coords_and_dims: + # Update the dependencies of the factory with coordinates of + # the concatenated cube. We need to check all coordinate types + # here (dim coords, aux coords, and scalar coords). + + # Note: in contrast to other _build_... methods of this class, we + # do NOT need to distinguish between aux factories that span the + # nominated concatenation axis and aux factories that do not. The + # reason is that ALL aux factories need to be updated with the new + # coordinates of the concatenated cube (passed to this function via + # dim_coords_and_dims, aux_coords_and_dims, scalar_coords [these + # contain ALL new coordinates, not only the ones spanning the + # concatenation dimension]), so no special treatment for the aux + # factories that span the concatenation dimension is necessary. If + # not all aux factories are properly updated with references to the + # new coordinates, this may lead to KeyErrors (see + # https://github.com/SciTools/iris/issues/5339). 
+ new_dependencies = {} + for old_dependency in factory.dependencies.values(): + if old_dependency in old_dim_coords: + dep_idx = old_dim_coords.index(old_dependency) + new_dependency = new_dim_coords[dep_idx] + elif old_dependency in old_aux_coords: + dep_idx = old_aux_coords.index(old_dependency) + new_dependency = new_aux_coords[dep_idx] + else: + dep_idx = old_scalar_coords.index(old_dependency) + new_dependency = new_scalar_coords[dep_idx] + new_dependencies[id(old_dependency)] = new_dependency - # Create new factory with the updated dependencies. - factory = factory.updated(new_dependencies) + # Create new factory with the updated dependencies. + factory = factory.updated(new_dependencies) aux_factories.append(factory) diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index 1f39b2589d..2543e2931b 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -278,6 +278,17 @@ def test_equal_derived_coords(self): [[10.0, 20.0], [10.0, 40.0], [10.0, 20.0], [10.0, 40.0]], ) + # Make sure indexing the resulting cube works correctly + # (see https://github.com/SciTools/iris/issues/5339) + self.assertEqual(result[0][0].shape, (2,)) + + # Make sure ALL aux factory dependencies of the resulting cube were + # properly updated (i.e., they are different from the original cubes). 
+ for aux_factory in result[0].aux_factories: + for coord in aux_factory.dependencies.values(): + self.assertNotEqual(id(coord), id(cube_a.coord(coord.name()))) + self.assertNotEqual(id(coord), id(cube_b.coord(coord.name()))) + def test_equal_derived_coords_with_bounds(self): cube_a = self.create_cube() cube_a.coord("sigma").bounds = [[0.0, 5.0], [5.0, 20.0]] From c710a2edbdca2372d379f4532546e871ce745d26 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Fri, 16 Jun 2023 10:32:13 +0100 Subject: [PATCH 008/134] add whatsnew for #5142 (#5352) Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- docs/src/whatsnew/3.6.rst | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst index 61bf75f15c..136d47e15d 100644 --- a/docs/src/whatsnew/3.6.rst +++ b/docs/src/whatsnew/3.6.rst @@ -63,6 +63,11 @@ This document explains the changes made to Iris for this release :icon: alert :animate: fade-in + 📢 **Announcements** + + Welcome and congratulations to `@sloosvel`_ who made their first contribution to + Iris! 🎉 + The patches in this release of Iris include: 🐛 **Bugs Fixed** @@ -76,6 +81,15 @@ This document explains the changes made to Iris for this release properly updated. (:issue:`5339`, :pull:`5340`) + 🚀 **Performance Enhancements** + + #. `@sloosvel`_ improved :meth:`~iris.cube.CubeList.concatenate_cube` and + :meth:`~iris.cube.CubeList.concatenate` to ensure that lazy auxiliary coordinate + points and bounds are not realized. This change now allows cubes with + high-resolution auxiliary coordinates to concatenate successfully whilst using a + minimal in-core memory footprint. + (:issue:`5115`, :pull:`5142`) + 📢 Announcements ================ @@ -191,6 +205,7 @@ This document explains the changes made to Iris for this release core dev names are automatically included by the common_links.inc: .. _@fnattino: https://github.com/fnattino +.. _@sloosvel: https://github.com/sloosvel .. 
comment From 2e6ca8dd08489ce557a59edd138f03ee7f31b5fa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 22:17:16 +0100 Subject: [PATCH 009/134] [pre-commit.ci] pre-commit autoupdate (#5354) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/codespell-project/codespell: v2.2.4 → v2.2.5](https://github.com/codespell-project/codespell/compare/v2.2.4...v2.2.5) - [github.com/asottile/blacken-docs: 1.13.0 → 1.14.0](https://github.com/asottile/blacken-docs/compare/1.13.0...1.14.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3b8920c694..500f69134f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/codespell-project/codespell - rev: "v2.2.4" + rev: "v2.2.5" hooks: - id: codespell types_or: [asciidoc, python, markdown, rst] @@ -56,7 +56,7 @@ repos: args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: 1.13.0 + rev: 1.14.0 hooks: - id: blacken-docs types: [file, rst] From b3bef2627d95840e4495a9c77d4ab5f1b4e75ec1 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Tue, 20 Jun 2023 13:24:56 +0100 Subject: [PATCH 010/134] Updated environment lockfiles (#5342) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 78 ++++++++++++------------- requirements/locks/py311-linux-64.lock | 80 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 76 ++++++++++++------------ 3 files changed, 117 insertions(+), 117 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 83cf6efc0f..a0632e193e 
100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -9,16 +9,16 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d -https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 @@ -43,21 +43,20 @@ https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 
+https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 @@ -71,7 +70,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a @@ -82,31 +81,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.con https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.conda#cf4a8f520fdad3a63bb2bce74576cd2d 
+https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_3.conda#3cbb1d20331e8b1f170de5cef410cd80 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad 
-https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.4-h5cf9203_0.conda#7be3251c7b337e46bea0b8f3a3ed3c58 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_2.conda#20b4708cd04bdc8138d03314ddd97885 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_3.conda#247055daf6d75d72ac0e94de151e8ab0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.11-he550d4f_0_cpython.conda#7439c9d24378a82b73a7a53868dacdf1 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -115,7 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda# https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h8ee46fc_1.conda#52d09ea80a42c0466214609ef0a2d62d 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b @@ -133,26 +131,26 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.5.0-pyh1a96a4e_0.conda#20edd290b319aa0eff3e9055375756dc +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe 
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_0.conda#b10174a063ec195f8fe1b278282c3149 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py310h1fa729e_0.conda#a1f0db6709778b77b5903541eeac4032 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_0.conda#5597d9f9778af6883ae64f0e7d39416c https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f 
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605 @@ -190,6 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 @@ -200,30 +199,32 @@ https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py310hd41b1e2_0.conda#684399f9ddc0b9d6f3b6164f6107098e https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py310h2372a71_0.conda#13df1c4ea94f2e3326b15da1999e5999 
https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.tar.bz2#fd18cd597d23b2b5ddde23bd5b7aec32 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py310h2372a71_0.conda#76426eaff204520e719207700359a855 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py310h2372a71_0.conda#d3d83b419c81ac718a9221442707882b https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.4-default_h1cdf331_0.conda#5bb4fde7a7ea23ea471b171561943aec +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py310h582fbeb_1.conda#cf62f6cff3536eafaaa0c740b0bf7465 https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df 
-https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.2-pyhd8ed1ab_1.conda#f2465696f4396245eca4613f6e924796 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h056c13c_1.conda#32d925cfd330e0cbb72b7618558a44e8 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py310hc6cd4ac_0.conda#a3217e1bff09702dfdfcb536825fc12d https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df @@ -232,31 +233,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_1 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py310he60537e_0.conda#68b2dd34c69d08b05a9db5e3596fe3ee https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py310h7cbd5c2_0.conda#e0b845c6b29a1ed2e409bef6c0f5d96b 
https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py310hb814896_1.conda#d44c6841ee904252e0e8b7a1c7b11383 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.1-pyhd8ed1ab_0.conda#b90a2dec6d308d71649dbe58dc32c337 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 
https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.3-nompi_py310h2d0b64f_102.conda#7e4ea99dccc0dd27fd1c87821ba8ef13 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hde23a83_100.conda#d5de42b3b49fb20e01d1003085ef588f https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 
https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 3fb33e82cd..81e5cb1048 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -9,16 +9,16 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d 
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 @@ -43,21 +43,20 @@ https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f 
https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 @@ -71,7 +70,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a @@ -82,40 +81,39 @@ https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.con https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.conda#cf4a8f520fdad3a63bb2bce74576cd2d +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_3.conda#3cbb1d20331e8b1f170de5cef410cd80 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.4-h5cf9203_0.conda#7be3251c7b337e46bea0b8f3a3ed3c58 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_2.conda#20b4708cd04bdc8138d03314ddd97885 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_3.conda#247055daf6d75d72ac0e94de151e8ab0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.3-h2755cc3_0_cpython.conda#37005ea5f68df6a8a381b70cf4d4a160 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.4-hab00c5b_0_cpython.conda#1c628861a2a126b9fc9363ca1b7d014e https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc 
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h8ee46fc_1.conda#52d09ea80a42c0466214609ef0a2d62d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b @@ -133,26 +131,26 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d 
-https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.5.0-pyh1a96a4e_0.conda#20edd290b319aa0eff3e9055375756dc +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_0.conda#b10174a063ec195f8fe1b278282c3149 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c 
https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py311h2582759_0.conda#adb20bd57069614552adac60a020c36d +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py311h8e6699e_0.conda#90db8cc0dfa20853329bfc6642f887aa @@ -189,6 +187,7 @@ https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.co https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 @@ -199,30 +198,32 @@ https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0 
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py311ha3edf6b_0.conda#e7548e7f58965a2fe97a95950a5fedc6 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py311h9547e67_0.conda#daf3f23397ab2265d0cdfa339f3627ba https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py311h459d7ec_0.conda#3c2c65575c28b23afc5e4ff721a2fc9f https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.tar.bz2#21523141b35484b1edafba962c6ea883 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py311h459d7ec_0.conda#ddd2cd004e10bc7a1e042283326cbf91 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py311h459d7ec_0.conda#b19f671a6b221f922cf871d71a71c0fa https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.4-default_h1cdf331_0.conda#5bb4fde7a7ea23ea471b171561943aec +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 
+https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py311h0b84326_1.conda#6be2190fdbf26a6c1d3356a54d955237 https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df -https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.2-pyhd8ed1ab_1.conda#f2465696f4396245eca4613f6e924796 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py311hb755f60_0.conda#2b5430f2f1651f460c852e1fdd549184 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 
https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df @@ -231,31 +232,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_1 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py311h8597a09_0.conda#70c3b734ffe82c16b6d121aaa11929a8 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py311h320fe9a_0.conda#509769b430266dc5c2f6a3eab0f23164 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py311h1850bce_1.conda#572159a946b809df471b11db4995c708 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py311hcafe171_3.conda#0d79df2a96f6572fed2883374400b235 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.1-pyhd8ed1ab_0.conda#b90a2dec6d308d71649dbe58dc32c337 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.3-nompi_py311h1717473_102.conda#d3b4d3ed2f3188d27d43e2c95d0dc2ab +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h4d7c953_100.conda#c03492d0342e512e58aa2d6c5fdaaa91 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 
-https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 8e584a3a74..58fbb19b52 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -9,16 +9,16 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d -https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 @@ -43,21 +43,20 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 
https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 @@ -71,7 +70,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a @@ -82,31 +81,30 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.con https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_2.conda#cf4a8f520fdad3a63bb2bce74576cd2d +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_3.conda#3cbb1d20331e8b1f170de5cef410cd80 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 
+https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.4-h5cf9203_0.conda#7be3251c7b337e46bea0b8f3a3ed3c58 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b 
-https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_2.conda#20b4708cd04bdc8138d03314ddd97885 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_3.conda#247055daf6d75d72ac0e94de151e8ab0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -115,7 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda# https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h8ee46fc_1.conda#52d09ea80a42c0466214609ef0a2d62d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b @@ -133,26 +131,26 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py39hf3d152e_3.tar.bz2#4f0fa7459a1f40a969aaad418b1c428c 
https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.5.0-pyh1a96a4e_0.conda#20edd290b319aa0eff3e9055375756dc +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 
-https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.4-default_h4d60ac6_0.conda#3309280871a6ccbfd84bd7f53d559153 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_0.conda#b10174a063ec195f8fe1b278282c3149 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py39h72bdee0_0.conda#35514f5320206df9f4661c138c02e1c1 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_0.conda#9c858d105816f454c6b64f3e19184b60 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.5-py39h3d75532_0.conda#ea5d332e361eb72c2593cf79559bc0ec @@ -190,6 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 
+https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 @@ -200,30 +199,32 @@ https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py39h7633fee_0.conda#54e6f32e448fdc273606011f0940d076 https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.tar.bz2#eb31327ace8dac15c2df243d9505a132 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py39hd1e30aa_0.conda#80605b792f58cf5c78a5b7e20cef1e35 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py39hd1e30aa_0.conda#5f7c468bf9d9551a80187db7e809ef1f https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 
https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.12.0-pyhd8ed1ab_0.conda#e5fd2260a231ee63b6969f4801082f2b https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.4-default_h1cdf331_0.conda#5bb4fde7a7ea23ea471b171561943aec +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py39haaeba84_1.conda#d7aa9b99ed6ade75fbab1e4cedcb3ce2 https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df -https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.2-pyhd8ed1ab_1.conda#f2465696f4396245eca4613f6e924796 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 
https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hf1c3bca_1.conda#ae6bfe65e81d9b59a71cc01a2858650f https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py39h3d6467e_0.conda#6d990f672cc70e5c480ddb74b789a17c https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.2-pyhd8ed1ab_0.conda#81a763f3c64fe6d5f32e033b0325265d +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df @@ -232,31 +233,30 @@ https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_ https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py39h40cae4c_0.conda#de99b3f807c0b295a7df94623df0fb4c https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.1-pyhd8ed1ab_0.conda#e2be672aece1f060adf7154f76531a35 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py39h718ffca_1.conda#a19bf4be7ebce54623541fa4ad22abb4 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1 
-https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.1-pyhd8ed1ab_0.conda#b90a2dec6d308d71649dbe58dc32c337 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py39he190548_0.conda#f2a931db797bb58bd335f4a857b4c898 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.3-nompi_py39h369ccc5_102.conda#dda035d195cf87b493bbb0aa296c381c +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4e81c44_100.conda#360163b65cfd5e43ac60de5c6c3a2696 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b 
https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.5.1-pyhd8ed1ab_0.conda#517e6d85a48d94b1f5997377df53b896 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 From 9e2cff55e3030aaa8fba5aa9124baafb7a7a8fdd Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 20 Jun 2023 14:33:39 +0100 Subject: [PATCH 011/134] address concatenate lazy comparison truthiness (#5353) * address concatenate lazy comparison truthiness * changes with test coverage * add __future__ annotations for py39 * review actions --- 
lib/iris/_concatenate.py | 36 ++--- lib/iris/tests/unit/concatenate/__init__.py | 136 +++++++++++++++++- .../unit/concatenate/test__CoordMetaData.py | 117 +++++++++++++++ .../unit/concatenate/test__CoordSignature.py | 121 ++++++++++++++++ 4 files changed, 383 insertions(+), 27 deletions(-) create mode 100644 lib/iris/tests/unit/concatenate/test__CoordMetaData.py create mode 100644 lib/iris/tests/unit/concatenate/test__CoordSignature.py diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index d98e63da5a..7bb27eaf83 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -115,9 +115,9 @@ def __new__(mcs, coord, dims): kwargs["circular"] = coord.circular if isinstance(coord, iris.coords.DimCoord): # Mix the monotonic ordering into the metadata. - if coord.core_points()[0] == coord.core_points()[-1]: + if coord.points[0] == coord.points[-1]: order = _CONSTANT - elif coord.core_points()[-1] > coord.core_points()[0]: + elif coord.points[-1] > coord.points[0]: order = _INCREASING else: order = _DECREASING @@ -775,37 +775,21 @@ def _calculate_extents(self): self.dim_extents = [] for coord, order in zip(self.dim_coords, self.dim_order): if order == _CONSTANT or order == _INCREASING: - points = _Extent( - coord.core_points()[0], coord.core_points()[-1] - ) - if coord.core_bounds() is not None: + points = _Extent(coord.points[0], coord.points[-1]) + if coord.bounds is not None: bounds = ( - _Extent( - coord.core_bounds()[0, 0], - coord.core_bounds()[-1, 0], - ), - _Extent( - coord.core_bounds()[0, 1], - coord.core_bounds()[-1, 1], - ), + _Extent(coord.bounds[0, 0], coord.bounds[-1, 0]), + _Extent(coord.bounds[0, 1], coord.bounds[-1, 1]), ) else: bounds = None else: # The order must be decreasing ... 
- points = _Extent( - coord.core_points()[-1], coord.core_points()[0] - ) - if coord.core_bounds() is not None: + points = _Extent(coord.points[-1], coord.points[0]) + if coord.bounds is not None: bounds = ( - _Extent( - coord.core_bounds()[-1, 0], - coord.core_bounds()[0, 0], - ), - _Extent( - coord.core_bounds()[-1, 1], - coord.core_bounds()[0, 1], - ), + _Extent(coord.bounds[-1, 0], coord.bounds[0, 0]), + _Extent(coord.bounds[-1, 1], coord.bounds[0, 1]), ) else: bounds = None diff --git a/lib/iris/tests/unit/concatenate/__init__.py b/lib/iris/tests/unit/concatenate/__init__.py index cf671a6553..229476f3a6 100644 --- a/lib/iris/tests/unit/concatenate/__init__.py +++ b/lib/iris/tests/unit/concatenate/__init__.py @@ -3,4 +3,138 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the :mod:`iris._concatenate` package.""" +"""Unit-test infrastructure for the :mod:`iris._concatenate` package.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import Any + +import dask.array as da +import numpy as np + +from iris._concatenate import _CONSTANT, _DECREASING, _INCREASING +import iris.common +from iris.coords import AuxCoord, DimCoord + +__all__ = ["ExpectedItem", "N_POINTS", "SCALE_FACTOR", "create_metadata"] + +# number of coordinate points +N_POINTS: int = 10 + +# coordinate points multiplication scale factor +SCALE_FACTOR: int = 10 + + +METADATA = { + "standard_name": "air_temperature", + "long_name": "air temperature", + "var_name": "atemp", + "units": "kelvin", + "attributes": {}, + "coord_system": None, + "climatological": False, + "circular": False, +} + + +@dataclass +class ExpectedItem: + """Expected test result components of :class:`iris._concatenate._CoordMetaData`.""" + + defn: iris.common.DimCoordMetadata | iris.common.CoordMetadata + dims: tuple[int, ...] 
+ points_dtype: np.dtype + bounds_dtype: np.dtype | None = None + kwargs: dict[str, Any] = field(default_factory=dict) + + +@dataclass +class MetaDataItem: + """Test input and expected output from :class:`iris._concatenate._CoordMetaData`.""" + + coord: AuxCoord | DimCoord + dims: tuple[int, ...] + expected: ExpectedItem + + +def create_metadata( + dim_coord: bool = True, + scalar: bool = False, + order: int = None, + circular: bool | None = False, + coord_dtype: np.dtype = None, + lazy: bool = True, + with_bounds: bool | None = False, +) -> MetaDataItem: + """Construct payload for :class:`iris._concatenate.CoordMetaData` testing.""" + if coord_dtype is None: + coord_dtype = np.float32 + + if order is None: + order = _INCREASING + + array_lib = da if lazy else np + bounds = None + + if scalar: + points = array_lib.ones(1, dtype=coord_dtype) + order = _CONSTANT + + if with_bounds: + bounds = array_lib.array([0, 2], dtype=coord_dtype).reshape(1, 2) + else: + if order == _CONSTANT: + points = array_lib.ones(N_POINTS, dtype=coord_dtype) + else: + if order == _DECREASING: + start, stop, step = N_POINTS - 1, -1, -1 + else: + start, stop, step = 0, N_POINTS, 1 + points = ( + array_lib.arange(start, stop, step, dtype=coord_dtype) + * SCALE_FACTOR + ) + + if with_bounds: + offset = SCALE_FACTOR // 2 + bounds = array_lib.vstack( + [points.copy() - offset, points.copy() + offset] + ).T + + bounds_dtype = coord_dtype if with_bounds else None + + values = METADATA.copy() + values["circular"] = circular + CoordClass = DimCoord if dim_coord else AuxCoord + coord = CoordClass(points, bounds=bounds) + if dim_coord and lazy: + # creating a DimCoord *always* results in realized points/bounds. 
+ assert not coord.has_lazy_points() + if with_bounds: + assert not coord.has_lazy_bounds() + metadata = iris.common.DimCoordMetadata(**values) + + if dim_coord: + coord.metadata = metadata + else: + # convert the DimCoordMetadata to a CoordMetadata instance + # and assign to the AuxCoord + coord.metadata = iris.common.CoordMetadata.from_metadata(metadata) + + dims = tuple([dim for dim in range(coord.ndim)]) + kwargs = {"scalar": scalar} + + if dim_coord: + kwargs["circular"] = circular + kwargs["order"] = order + + expected = ExpectedItem( + defn=metadata, + dims=dims, + points_dtype=coord_dtype, + bounds_dtype=bounds_dtype, + kwargs=kwargs, + ) + + return MetaDataItem(coord=coord, dims=dims, expected=expected) diff --git a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py new file mode 100644 index 0000000000..6f29e1f65f --- /dev/null +++ b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py @@ -0,0 +1,117 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit-tests for :class:`iris._concatenate._CoordMetaData`.""" + +from __future__ import annotations + +import numpy as np +import pytest + +from iris._concatenate import ( + _CONSTANT, + _DECREASING, + _INCREASING, + _CoordMetaData, +) + +from . 
import ExpectedItem, create_metadata + + +def check(actual: _CoordMetaData, expected: ExpectedItem) -> None: + """Assert actual and expected results.""" + assert actual.defn == expected.defn + assert actual.dims == expected.dims + assert actual.points_dtype == expected.points_dtype + assert actual.bounds_dtype == expected.bounds_dtype + assert actual.kwargs == expected.kwargs + + +@pytest.mark.parametrize("order", [_DECREASING, _INCREASING]) +@pytest.mark.parametrize("circular", [False, True]) +@pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) +@pytest.mark.parametrize("lazy", [False, True]) +@pytest.mark.parametrize("with_bounds", [False, True]) +def test_dim( + order: int, + circular: bool, + coord_dtype: np.dtype, + lazy: bool, + with_bounds: bool, +) -> None: + """Test :class:`iris._concatenate._CoordMetaData` with dim coord.""" + metadata = create_metadata( + dim_coord=True, + scalar=False, + order=order, + circular=circular, + coord_dtype=coord_dtype, + lazy=lazy, + with_bounds=with_bounds, + ) + actual = _CoordMetaData(coord=metadata.coord, dims=metadata.dims) + check(actual, metadata.expected) + + +@pytest.mark.parametrize("circular", [False, True]) +@pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) +@pytest.mark.parametrize("lazy", [False, True]) +@pytest.mark.parametrize("with_bounds", [False, True]) +def test_dim__scalar( + circular: bool, coord_dtype: np.dtype, lazy: bool, with_bounds: bool +) -> None: + """Test :class:`iris._concatenate._CoordMetaData` with scalar dim coord.""" + metadata = create_metadata( + dim_coord=True, + scalar=True, + order=_CONSTANT, + circular=circular, + coord_dtype=coord_dtype, + lazy=lazy, + with_bounds=with_bounds, + ) + actual = _CoordMetaData(coord=metadata.coord, dims=metadata.dims) + check(actual, metadata.expected) + + +@pytest.mark.parametrize("order", [_DECREASING, _INCREASING]) +@pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) +@pytest.mark.parametrize("lazy", [False, 
True]) +@pytest.mark.parametrize("with_bounds", [False, True]) +def test_aux( + order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool +) -> None: + """Test :class:`iris._concatenate._CoordMetaData` with aux coord.""" + metadata = create_metadata( + dim_coord=False, + scalar=False, + order=order, + circular=None, + coord_dtype=coord_dtype, + lazy=lazy, + with_bounds=with_bounds, + ) + actual = _CoordMetaData(coord=metadata.coord, dims=metadata.dims) + check(actual, metadata.expected) + + +@pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) +@pytest.mark.parametrize("lazy", [False, True]) +@pytest.mark.parametrize("with_bounds", [False, True]) +def test_aux__scalar( + coord_dtype: np.dtype, lazy: bool, with_bounds: bool +) -> None: + """Test :class:`iris._concatenate._CoordMetaData` with scalar aux coord.""" + metadata = create_metadata( + dim_coord=False, + scalar=True, + order=_CONSTANT, + circular=None, + coord_dtype=coord_dtype, + lazy=lazy, + with_bounds=with_bounds, + ) + actual = _CoordMetaData(coord=metadata.coord, dims=metadata.dims) + check(actual, metadata.expected) diff --git a/lib/iris/tests/unit/concatenate/test__CoordSignature.py b/lib/iris/tests/unit/concatenate/test__CoordSignature.py new file mode 100644 index 0000000000..eb62c5ec64 --- /dev/null +++ b/lib/iris/tests/unit/concatenate/test__CoordSignature.py @@ -0,0 +1,121 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit-tests for :class:`iris._concatenate._CoordSignature`.""" + +from __future__ import annotations + +from dataclasses import dataclass, field + +import numpy as np +import pytest + +from iris._concatenate import ( + _CONSTANT, + _DECREASING, + _INCREASING, + _CoordExtent, + _CoordMetaData, + _CoordSignature, + _Extent, +) +from iris.coords import DimCoord + +from . 
import N_POINTS, SCALE_FACTOR, create_metadata + + +@dataclass +class MockCubeSignature: + """Simple mock of :class:`iris._concatenate._CubeSignature`.""" + + aux_coords_and_dims: bool | None = None + cell_measures_and_dims: bool | None = None + ancillary_variables_and_dims: bool | None = None + derived_coords_and_dims: bool | None = None + dim_coords: list[DimCoord, ...] = field(default_factory=list) + dim_mapping: bool | None = None + dim_extents: list[_Extent, ...] = field(default_factory=list) + dim_order: list[int, ...] = field(default_factory=list) + dim_metadata: list[_CoordMetaData, ...] = field(default_factory=list) + + +@pytest.mark.parametrize("order", [_DECREASING, _INCREASING]) +@pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) +@pytest.mark.parametrize("lazy", [False, True]) +@pytest.mark.parametrize("with_bounds", [False, True]) +def test_dim( + order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool +) -> None: + """Test extent calculation of vector dimension coordinates.""" + metadata = create_metadata( + dim_coord=True, + scalar=False, + order=order, + coord_dtype=coord_dtype, + lazy=lazy, + with_bounds=with_bounds, + ) + dim_metadata = [_CoordMetaData(metadata.coord, metadata.dims)] + cube_signature = MockCubeSignature( + dim_coords=[metadata.coord], dim_metadata=dim_metadata + ) + coord_signature = _CoordSignature(cube_signature) + assert len(coord_signature.dim_extents) == 1 + (actual,) = coord_signature.dim_extents + first, last = coord_dtype(0), coord_dtype((N_POINTS - 1) * SCALE_FACTOR) + if order == _CONSTANT: + emsg = f"Expected 'order' of '{_DECREASING}' or '{_INCREASING}', got '{order}'." 
+ raise ValueError(emsg) + points_extent = _Extent(min=first, max=last) + bounds_extent = None + if with_bounds: + offset = SCALE_FACTOR // 2 + if order == _INCREASING: + bounds_extent = ( + _Extent(min=first - offset, max=last - offset), + _Extent(min=first + offset, max=last + offset), + ) + else: + bounds_extent = ( + _Extent(min=first + offset, max=last + offset), + _Extent(min=first - offset, max=last - offset), + ) + expected = _CoordExtent(points=points_extent, bounds=bounds_extent) + assert actual == expected + + +@pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) +@pytest.mark.parametrize("lazy", [False, True]) +@pytest.mark.parametrize("with_bounds", [False, True]) +def test_dim__scalar( + coord_dtype: np.dtype, lazy: bool, with_bounds: bool +) -> None: + """Test extent calculation of scalar dimension coordinates.""" + metadata = create_metadata( + dim_coord=True, + scalar=True, + order=_CONSTANT, + coord_dtype=coord_dtype, + lazy=lazy, + with_bounds=with_bounds, + ) + dim_metadata = [_CoordMetaData(metadata.coord, metadata.dims)] + cube_signature = MockCubeSignature( + dim_coords=[metadata.coord], dim_metadata=dim_metadata + ) + coord_signature = _CoordSignature(cube_signature) + assert len(coord_signature.dim_extents) == 1 + (actual,) = coord_signature.dim_extents + point = coord_dtype(1) + points_extent = _Extent(min=point, max=point) + bounds_extent = None + if with_bounds: + first, last = coord_dtype(0), coord_dtype(2) + bounds_extent = ( + _Extent(min=first, max=first), + _Extent(min=last, max=last), + ) + expected = _CoordExtent(points=points_extent, bounds=bounds_extent) + assert actual == expected From f436cbce09b70026c06763b0e5c8a68abe4e3ae7 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 26 Jun 2023 13:10:03 +0100 Subject: [PATCH 012/134] Simplify and lazify broadcast_to_shape (#5307) (#5359) * Simplify and lazify broadcast_to_shape (#5307) * working for all except masked lazy * use moveaxis * handle lazy masked case * add 
tests for is_lazy_masked_data * whatsnew * check compute isn't called * update docstring * add whatnew patch footnote --------- Co-authored-by: Ruth Comer <10599679+rcomer@users.noreply.github.com> --- docs/src/whatsnew/3.6.rst | 10 ++++ lib/iris/_lazy_data.py | 9 ++++ .../lazy_data/test_is_lazy_masked_data.py | 27 ++++++++++ .../unit/util/test_broadcast_to_shape.py | 28 ++++++++++ lib/iris/util.py | 54 +++++++++---------- 5 files changed, 98 insertions(+), 30 deletions(-) create mode 100644 lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst index 136d47e15d..c81307a7ba 100644 --- a/docs/src/whatsnew/3.6.rst +++ b/docs/src/whatsnew/3.6.rst @@ -70,6 +70,12 @@ This document explains the changes made to Iris for this release The patches in this release of Iris include: + ✨ **Features** + + #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles + lazy data. This pull-request has been included to support :pull:`5341`. + (:pull:`5307`) [``pre-v3.7.0``] + 🐛 **Bugs Fixed** #. `@stephenworsley`_ fixed :meth:`~iris.cube.Cube.convert_units` to allow unit @@ -90,6 +96,10 @@ This document explains the changes made to Iris for this release minimal in-core memory footprint. (:issue:`5115`, :pull:`5142`) + Note that, the above contribution labelled with ``pre-v3.7.0`` is part of the + forthcoming Iris ``v3.7.0`` release, but requires to be included in this patch + release. + 📢 Announcements ================ diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index e0566fc8f2..4c294a7d2f 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -47,6 +47,15 @@ def is_lazy_data(data): return result +def is_lazy_masked_data(data): + """ + Return True if the argument is both an Iris 'lazy' data array and the + underlying array is of masked type. Otherwise return False. 
+ + """ + return is_lazy_data(data) and ma.isMA(da.utils.meta_from_array(data)) + + @lru_cache def _optimum_chunksize_internals( chunks, diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py new file mode 100644 index 0000000000..4d627a706b --- /dev/null +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py @@ -0,0 +1,27 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Test function :func:`iris._lazy data.is_lazy_masked_data`.""" + +import dask.array as da +import numpy as np +import pytest + +from iris._lazy_data import is_lazy_masked_data + +real_arrays = [ + np.arange(3), + np.ma.array(range(3)), + np.ma.array(range(3), mask=[0, 1, 1]), +] +lazy_arrays = [da.from_array(arr) for arr in real_arrays] + + +@pytest.mark.parametrize( + "arr, expected", zip(real_arrays + lazy_arrays, [False] * 4 + [True] * 2) +) +def test_is_lazy_masked_data(arr, expected): + result = is_lazy_masked_data(arr) + assert result is expected diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py index 36f00fa53f..3df1634ba5 100644 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ b/lib/iris/tests/unit/util/test_broadcast_to_shape.py @@ -9,6 +9,10 @@ # importing anything else import iris.tests as tests # isort:skip +from unittest import mock + +import dask +import dask.array as da import numpy as np import numpy.ma as ma @@ -40,6 +44,17 @@ def test_added_dimensions_transpose(self): for j in range(4): self.assertArrayEqual(b[i, :, j, :].T, a) + @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) + def test_lazy_added_dimensions_transpose(self, mocked_compute): + # adding dimensions and having the dimensions of the input + # transposed + a = da.random.random([2, 3]) 
+ b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1)) + mocked_compute.assert_not_called() + for i in range(5): + for j in range(4): + self.assertArrayEqual(b[i, :, j, :].T.compute(), a.compute()) + def test_masked(self): # masked arrays are also accepted a = np.random.random([2, 3]) @@ -49,6 +64,19 @@ def test_masked(self): for j in range(4): self.assertMaskedArrayEqual(b[i, :, j, :].T, m) + @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) + def test_lazy_masked(self, mocked_compute): + # masked arrays are also accepted + a = np.random.random([2, 3]) + m = da.ma.masked_array(a, mask=[[0, 1, 0], [0, 1, 1]]) + b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) + mocked_compute.assert_not_called() + for i in range(5): + for j in range(4): + self.assertMaskedArrayEqual( + b[i, :, j, :].compute().T, m.compute() + ) + def test_masked_degenerate(self): # masked arrays can have degenerate masks too a = np.random.random([2, 3]) diff --git a/lib/iris/util.py b/lib/iris/util.py index d96e0ee359..0b31ebdafc 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -23,7 +23,7 @@ import numpy.ma as ma from iris._deprecation import warn_deprecated -from iris._lazy_data import as_concrete_data, is_lazy_data +from iris._lazy_data import as_concrete_data, is_lazy_data, is_lazy_masked_data from iris.common import SERVICES from iris.common.lenient import _lenient_client import iris.exceptions @@ -34,8 +34,7 @@ def broadcast_to_shape(array, shape, dim_map): Broadcast an array to a given shape. Each dimension of the array must correspond to a dimension in the - given shape. Striding is used to repeat the array until it matches - the desired shape, returning repeated views on the original array. + given shape. The result is a read-only view (see :func:`numpy.broadcast_to`). If you need to write to the resulting array, make a copy first. Args: @@ -76,35 +75,30 @@ def broadcast_to_shape(array, shape, dim_map): See more at :doc:`/userguide/real_and_lazy_data`. 
""" - if len(dim_map) != array.ndim: - # We must check for this condition here because we cannot rely on - # getting an error from numpy if the dim_map argument is not the - # correct length, we might just get a segfault. - raise ValueError( - "dim_map must have an entry for every " - "dimension of the input array" - ) + n_orig_dims = len(array.shape) + n_new_dims = len(shape) - n_orig_dims + array = array.reshape(array.shape + (1,) * n_new_dims) + + # Get dims in required order. + array = np.moveaxis(array, range(n_orig_dims), dim_map) + new_array = np.broadcast_to(array, shape) - def _broadcast_helper(a): - strides = [0] * len(shape) - for idim, dim in enumerate(dim_map): - if shape[dim] != a.shape[idim]: - # We'll get garbage values if the dimensions of array are not - # those indicated by shape. - raise ValueError("shape and array are not compatible") - strides[dim] = a.strides[idim] - return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides) - - array_view = _broadcast_helper(array) - if ma.isMaskedArray(array): - if array.mask is ma.nomask: - # Degenerate masks can be applied as-is. - mask_view = array.mask + if ma.isMA(array): + # broadcast_to strips masks so we need to handle them explicitly. + mask = ma.getmask(array) + if mask is ma.nomask: + new_mask = ma.nomask else: - # Mask arrays need to be handled in the same way as the data array. - mask_view = _broadcast_helper(array.mask) - array_view = ma.array(array_view, mask=mask_view) - return array_view + new_mask = np.broadcast_to(mask, shape) + new_array = ma.array(new_array, mask=new_mask) + + elif is_lazy_masked_data(array): + # broadcast_to strips masks so we need to handle them explicitly. 
+ mask = da.ma.getmaskarray(array) + new_mask = da.broadcast_to(mask, shape) + new_array = da.ma.masked_array(new_array, new_mask) + + return new_array def delta(ndarray, dimension, circular=False): From bbba8072b30ca78696a67d4b44b5eb1df9559d7c Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 13:39:39 +0100 Subject: [PATCH 013/134] Updated environment lockfiles (#5358) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 22 +++++++++++----------- requirements/locks/py311-linux-64.lock | 20 ++++++++++---------- requirements/locks/py39-linux-64.lock | 20 ++++++++++---------- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index a0632e193e..fa6050a524 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -77,7 +77,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 @@ -86,7 +86,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_3.conda#3cbb1d20331e8b1f170de5cef410cd80 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_0.conda#aa8b86066614c4573f6db62c91978fa9 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 @@ -104,9 +104,9 @@ https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openb https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_3.conda#247055daf6d75d72ac0e94de151e8ab0 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_0.conda#276339b0115d92c6e0793dcdc7afe308 https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 -https://conda.anaconda.org/conda-forge/linux-64/python-3.10.11-he550d4f_0_cpython.conda#7439c9d24378a82b73a7a53868dacdf1 
+https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 @@ -153,7 +153,7 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_0.conda#5597d9f9778af6883ae64f0e7d39416c https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.0-py310ha4c1d20_0.conda#03319f78e5c9c8d90c0110e2c6ed24f6 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 @@ -161,14 +161,14 @@ https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd715 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf 
-https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.0-pyhd8ed1ab_0.conda#d3ed087d1f7f8f5590e8e87b57a8ce64 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -206,7 +206,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.t https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py310h2372a71_0.conda#d3d83b419c81ac718a9221442707882b https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 
-https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.7.0-pyha770c72_0.conda#ba3786c6846e46038fe60c785d46dc81 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 @@ -228,12 +228,12 @@ https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#a https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.7.0-hd8ed1ab_0.conda#27a4cec373ec84d1c1aa02a1e37f8eaf https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py310he60537e_0.conda#68b2dd34c69d08b05a9db5e3596fe3ee https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py310h7cbd5c2_0.conda#e0b845c6b29a1ed2e409bef6c0f5d96b https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.6.0-pyhd8ed1ab_0.conda#741384b21c1b512617f4ee4ea8457c5d 
https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da @@ -251,7 +251,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#c https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.1-pyhd8ed1ab_0.conda#838b85f656b078bdd882ef97978e7f40 https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 81e5cb1048..e0919fe2ef 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -77,7 +77,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d 
https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 @@ -86,7 +86,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_3.conda#3cbb1d20331e8b1f170de5cef410cd80 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_0.conda#aa8b86066614c4573f6db62c91978fa9 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 @@ -104,7 +104,7 @@ https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openb https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 
https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_3.conda#247055daf6d75d72ac0e94de151e8ab0 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_0.conda#276339b0115d92c6e0793dcdc7afe308 https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.4-hab00c5b_0_cpython.conda#1c628861a2a126b9fc9363ca1b7d014e https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -153,7 +153,7 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py311h8e6699e_0.conda#90db8cc0dfa20853329bfc6642f887aa +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.0-py311h64a7726_0.conda#4df60430eca64502eb01e02df92246bf https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 @@ -161,14 +161,14 @@ https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd715 
https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.0-pyhd8ed1ab_0.conda#d3ed087d1f7f8f5590e8e87b57a8ce64 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -205,7 +205,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.t 
https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py311h459d7ec_0.conda#b19f671a6b221f922cf871d71a71c0fa https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.7.0-pyha770c72_0.conda#ba3786c6846e46038fe60c785d46dc81 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 @@ -227,12 +227,12 @@ https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#a https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.7.0-hd8ed1ab_0.conda#27a4cec373ec84d1c1aa02a1e37f8eaf https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py311h8597a09_0.conda#70c3b734ffe82c16b6d121aaa11929a8 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py311h320fe9a_0.conda#509769b430266dc5c2f6a3eab0f23164 
https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.6.0-pyhd8ed1ab_0.conda#741384b21c1b512617f4ee4ea8457c5d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py311hcafe171_3.conda#0d79df2a96f6572fed2883374400b235 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da @@ -250,7 +250,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#c https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.1-pyhd8ed1ab_0.conda#838b85f656b078bdd882ef97978e7f40 https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 58fbb19b52..a7b390c21d 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -77,7 +77,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 @@ -86,7 +86,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-hf1915f5_3.conda#3cbb1d20331e8b1f170de5cef410cd80 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_0.conda#aa8b86066614c4573f6db62c91978fa9 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 
https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 @@ -104,7 +104,7 @@ https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openb https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hca2cd23_3.conda#247055daf6d75d72ac0e94de151e8ab0 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_0.conda#276339b0115d92c6e0793dcdc7afe308 https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -153,7 +153,7 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_0.conda#9c858d105816f454c6b64f3e19184b60 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.23.5-py39h3d75532_0.conda#ea5d332e361eb72c2593cf79559bc0ec +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.0-py39h6183b62_0.conda#02b87fef8e4c72be8256435ed59fe8de https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea 
https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 @@ -161,14 +161,14 @@ https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd715 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.0-pyhd8ed1ab_0.conda#d3ed087d1f7f8f5590e8e87b57a8ce64 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -205,7 +205,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.ta https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py39hd1e30aa_0.conda#5f7c468bf9d9551a80187db7e809ef1f https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.7.0-pyha770c72_0.conda#ba3786c6846e46038fe60c785d46dc81 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.12.0-pyhd8ed1ab_0.conda#e5fd2260a231ee63b6969f4801082f2b https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a @@ -229,11 +229,11 @@ https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.co https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.12.0-pyhd8ed1ab_0.conda#3544c818f0720c89eb16ae6940ab440b -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 
+https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.7.0-hd8ed1ab_0.conda#27a4cec373ec84d1c1aa02a1e37f8eaf https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py39h40cae4c_0.conda#de99b3f807c0b295a7df94623df0fb4c https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.6.0-pyhd8ed1ab_0.conda#741384b21c1b512617f4ee4ea8457c5d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b @@ -250,7 +250,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#c https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.1-pyhd8ed1ab_0.conda#838b85f656b078bdd882ef97978e7f40 https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 
https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 From e0df17c291ceede6bd6c9d3a2c93ddf2e000a4bd Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Mon, 26 Jun 2023 15:52:36 +0200 Subject: [PATCH 014/134] Make weighted aggregation lazy again (#5341) * Make weighted aggregation lazy again * kwarg unit is not necessary anymore for _Weights * Added test cases for _weights_units=None * Updated doc * Make sure that (Weighted)PercentileAggregator is included in API doc * Simplified tests * Make weights handling simpler by explicitly calling _Weights constructor * Added What's new entry * Do not change input units if the weights units are 1 --- docs/src/whatsnew/3.6.rst | 4 + lib/iris/analysis/__init__.py | 148 ++++++++------- lib/iris/cube.py | 52 +++--- lib/iris/tests/test_analysis.py | 258 ++++++++++---------------- lib/iris/tests/unit/cube/test_Cube.py | 54 +++--- 5 files changed, 234 insertions(+), 282 deletions(-) diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst index c81307a7ba..892da3310d 100644 --- a/docs/src/whatsnew/3.6.rst +++ b/docs/src/whatsnew/3.6.rst @@ -87,6 +87,10 @@ This document explains the changes made to Iris for this release properly updated. (:issue:`5339`, :pull:`5340`) + #. `@schlunma`_ fixed a bug which realized all weights during weighted + aggregation. Now weighted aggregation is fully lazy again. + (:issue:`5338`, :pull:`5341`) + 🚀 **Performance Enhancements** #. 
`@sloosvel`_ improved :meth:`~iris.cube.CubeList.concatenate_cube` and diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 4cd9ccbe05..f00c3dd850 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -81,6 +81,7 @@ "PEAK", "PERCENTILE", "PROPORTION", + "PercentileAggregator", "PointInCell", "RMS", "STD_DEV", @@ -89,6 +90,7 @@ "VARIANCE", "WPERCENTILE", "WeightedAggregator", + "WeightedPercentileAggregator", "clear_phenomenon_identity", "create_weighted_aggregator_fn", ) @@ -488,7 +490,7 @@ def __init__( aggregation. Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - Additional kwargs:: + Additional kwargs: Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. @@ -719,9 +721,12 @@ def __init__(self, units_func=None, **kwargs): If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a value that can be made into one. + To ensure backwards-compatibility, also accepts a callable with + call signature (units). - Additional kwargs:: - Passed through to :data:`call_func` and :data:`lazy_func`. + Additional kwargs: + Passed through to :data:`call_func`, :data:`lazy_func`, and + :data:`units_func`. This aggregator can used by cube aggregation methods such as :meth:`~iris.cube.Cube.collapsed` and @@ -960,14 +965,27 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a value that can be made into one. + To ensure backwards-compatibility, also accepts a callable with + call signature (units). + + If the aggregator is used by a cube aggregation method (e.g., + :meth:`~iris.cube.Cube.collapsed`, + :meth:`~iris.cube.Cube.aggregated_by`, + :meth:`~iris.cube.Cube.rolling_window`), a keyword argument + `_weights_units` is provided to this function to allow updating + units based on the weights. 
`_weights_units` is determined from the + `weights` given to the aggregator (``None`` if no weights are + given). See :ref:`user guide ` + for an example of weighted aggregation that changes units. * lazy_func (callable or None): An alternative to :data:`call_func` implementing a lazy aggregation. Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - Additional kwargs:: - Passed through to :data:`call_func` and :data:`lazy_func`. + Additional kwargs: + Passed through to :data:`call_func`, :data:`lazy_func`, and + :data:`units_func`. This aggregator can used by cube aggregation methods such as :meth:`~iris.cube.Cube.collapsed` and @@ -1090,7 +1108,7 @@ class WeightedAggregator(Aggregator): def __init__( self, cell_method, call_func, units_func=None, lazy_func=None, **kwargs ): - """ + r""" Create a weighted aggregator for the given :data:`call_func`. Args: @@ -1099,12 +1117,29 @@ def __init__( Cell method string that supports string format substitution. * call_func (callable): - Data aggregation function. Call signature `(data, axis, **kwargs)`. + Data aggregation function. Call signature `(data, axis, + \**kwargs)`. Kwargs: * units_func (callable): - Units conversion function. + | *Call signature*: (units, \**kwargs) + + If provided, called to convert a cube's units. + Returns an :class:`cf_units.Unit`, or a + value that can be made into one. + To ensure backwards-compatibility, also accepts a callable with + call signature (units). + + If the aggregator is used by a cube aggregation method (e.g., + :meth:`~iris.cube.Cube.collapsed`, + :meth:`~iris.cube.Cube.aggregated_by`, + :meth:`~iris.cube.Cube.rolling_window`), a keyword argument + `_weights_units` is provided to this function to allow updating + units based on the weights. `_weights_units` is determined from the + `weights` given to the aggregator (``None`` if no weights are + given). 
See :ref:`user guide ` + for an example of weighted aggregation that changes units. * lazy_func (callable or None): An alternative to :data:`call_func` implementing a lazy @@ -1112,7 +1147,8 @@ def __init__( main operation, but should raise an error in unhandled cases. Additional kwargs: - Passed through to :data:`call_func` and :data:`lazy_func`. + Passed through to :data:`call_func`, :data:`lazy_func`, and + :data:`units_func`. """ Aggregator.__init__( @@ -1187,20 +1223,18 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): return result -class _Weights(np.ndarray): +class _Weights: """Class for handling weights for weighted aggregation. - This subclasses :class:`numpy.ndarray`; thus, all methods and properties of - :class:`numpy.ndarray` (e.g., `shape`, `ndim`, `view()`, etc.) are - available. + Provides the following two attributes: - Details on subclassing :class:`numpy.ndarray` are given here: - https://numpy.org/doc/stable/user/basics.subclassing.html + * ``array``: Lazy or non-lazy array of weights. + * ``units``: Units associated with the weights. """ - def __new__(cls, weights, cube, units=None): - """Create class instance. + def __init__(self, weights, cube): + """Initialize class instance. Args: @@ -1212,18 +1246,14 @@ def __new__(cls, weights, cube, units=None): one of :meth:`iris.cube.Cube.coords`, :meth:`iris.cube.Cube.cell_measures`, or :meth:`iris.cube.Cube.ancillary_variables`). If given as an - array-like object, use this directly and assume units of `1`. If - `units` is given, ignore all units derived above and use the ones - given by `units`. + array-like object, use this directly and assume units of `1`. Note: + this does **not** create a copy of the input array. * cube (Cube): Input cube for aggregation. If weights is given as :obj:`str` or :class:`iris.coords._DimensionalMetadata`, try to extract the :class:`iris.coords._DimensionalMetadata` object and corresponding dimensional mappings from this cube. 
Otherwise, this argument is ignored. - * units (string, Unit): - If ``None``, use units derived from `weights`. Otherwise, overwrite - the units derived from `weights` and use `units`. """ # `weights` is a cube @@ -1231,8 +1261,8 @@ def __new__(cls, weights, cube, units=None): # "hasattr" syntax here # --> Extract data and units from cube if hasattr(weights, "add_aux_coord"): - obj = np.asarray(weights.data).view(cls) - obj.units = weights.units + derived_array = weights.core_data() + derived_units = weights.units # `weights`` is a string or _DimensionalMetadata object # --> Extract _DimensionalMetadata object from cube, broadcast it to @@ -1240,55 +1270,23 @@ def __new__(cls, weights, cube, units=None): # its data and units elif isinstance(weights, (str, _DimensionalMetadata)): dim_metadata = cube._dimensional_metadata(weights) - arr = dim_metadata._values + derived_array = dim_metadata._core_values() if dim_metadata.shape != cube.shape: - arr = iris.util.broadcast_to_shape( - arr, + derived_array = iris.util.broadcast_to_shape( + derived_array, cube.shape, dim_metadata.cube_dims(cube), ) - obj = np.asarray(arr).view(cls) - obj.units = dim_metadata.units + derived_units = dim_metadata.units - # Remaining types (e.g., np.ndarray): try to convert to ndarray. + # Remaining types (e.g., np.ndarray, dask.array.core.Array, etc.) + # --> Use array directly and assign units of "1" else: - obj = np.asarray(weights).view(cls) - obj.units = Unit("1") - - # Overwrite units from units argument if necessary - if units is not None: - obj.units = units + derived_array = weights + derived_units = Unit("1") - return obj - - def __array_finalize__(self, obj): - """See https://numpy.org/doc/stable/user/basics.subclassing.html. - - Note - ---- - `obj` cannot be `None` here since ``_Weights.__new__`` does not call - ``super().__new__`` explicitly. 
- - """ - self.units = getattr(obj, "units", Unit("1")) - - @classmethod - def update_kwargs(cls, kwargs, cube): - """Update ``weights`` keyword argument in-place. - - Args: - - * kwargs (dict): - Keyword arguments that will be updated in-place if a `weights` - keyword is present which is not ``None``. - * cube (Cube): - Input cube for aggregation. If weights is given as :obj:`str`, try - to extract a cell measure with the corresponding name from this - cube. Otherwise, this argument is ignored. - - """ - if kwargs.get("weights") is not None: - kwargs["weights"] = cls(kwargs["weights"], cube) + self.array = derived_array + self.units = derived_units def create_weighted_aggregator_fn(aggregator_fn, axis, **kwargs): @@ -1752,11 +1750,17 @@ def _sum(array, **kwargs): def _sum_units_func(units, **kwargs): """Multiply original units with weight units if possible.""" weights = kwargs.get("weights") - if weights is None: # no weights given or weights are None - result = units - elif hasattr(weights, "units"): # weights are _Weights - result = units * weights.units - else: # weights are regular np.ndarrays + weights_units = kwargs.get("_weights_units") + multiply_by_weights_units = all( + [ + weights is not None, + weights_units is not None, + weights_units != "1", + ] + ) + if multiply_by_weights_units: + result = units * weights_units + else: result = units return result diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 44a40cf72b..877c6914bd 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3836,7 +3836,10 @@ def collapsed(self, coords, aggregator, **kwargs): """ # Update weights kwargs (if necessary) to handle different types of # weights - _Weights.update_kwargs(kwargs, self) + weights_info = None + if kwargs.get("weights") is not None: + weights_info = _Weights(kwargs["weights"], self) + kwargs["weights"] = weights_info.array # Convert any coordinate names to coordinates coords = self._as_list_of_coords(coords) @@ -3981,7 +3984,11 @@ def collapsed(self, 
coords, aggregator, **kwargs): ) aggregator.update_metadata( - collapsed_cube, coords, axis=collapse_axis, **kwargs + collapsed_cube, + coords, + axis=collapse_axis, + _weights_units=getattr(weights_info, "units", None), + **kwargs, ) result = aggregator.post_process( collapsed_cube, data_result, coords, **kwargs @@ -4074,7 +4081,10 @@ def aggregated_by( """ # Update weights kwargs (if necessary) to handle different types of # weights - _Weights.update_kwargs(kwargs, self) + weights_info = None + if kwargs.get("weights") is not None: + weights_info = _Weights(kwargs["weights"], self) + kwargs["weights"] = weights_info.array groupby_coords = [] dimension_to_groupby = None @@ -4114,16 +4124,10 @@ def aggregated_by( f"that is aggregated, got {len(weights):d}, expected " f"{self.shape[dimension_to_groupby]:d}" ) - - # iris.util.broadcast_to_shape does not preserve _Weights type - weights = _Weights( - iris.util.broadcast_to_shape( - weights, - self.shape, - (dimension_to_groupby,), - ), - self, - units=weights.units, + weights = iris.util.broadcast_to_shape( + weights, + self.shape, + (dimension_to_groupby,), ) if weights.shape != self.shape: raise ValueError( @@ -4274,7 +4278,11 @@ def aggregated_by( # Add the aggregation meta data to the aggregate-by cube. aggregator.update_metadata( - aggregateby_cube, groupby_coords, aggregate=True, **kwargs + aggregateby_cube, + groupby_coords, + aggregate=True, + _weights_units=getattr(weights_info, "units", None), + **kwargs, ) # Replace the appropriate coordinates within the aggregate-by cube. 
(dim_coord,) = self.coords( @@ -4413,7 +4421,10 @@ def rolling_window(self, coord, aggregator, window, **kwargs): """ # Update weights kwargs (if necessary) to handle different types of # weights - _Weights.update_kwargs(kwargs, self) + weights_info = None + if kwargs.get("weights") is not None: + weights_info = _Weights(kwargs["weights"], self) + kwargs["weights"] = weights_info.array coord = self._as_list_of_coords(coord)[0] @@ -4500,6 +4511,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): new_cube, [coord], action="with a rolling window of length %s over" % window, + _weights_units=getattr(weights_info, "units", None), **kwargs, ) # and perform the data transformation, generating weights first if @@ -4516,14 +4528,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): "as the window." ) kwargs = dict(kwargs) - - # iris.util.broadcast_to_shape does not preserve _Weights type - kwargs["weights"] = _Weights( - iris.util.broadcast_to_shape( - weights, rolling_window_data.shape, (dimension + 1,) - ), - self, - units=weights.units, + kwargs["weights"] = iris.util.broadcast_to_shape( + weights, rolling_window_data.shape, (dimension + 1,) ) data_result = aggregator.aggregate( rolling_window_data, axis=dimension + 1, **kwargs diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index 4b36a915aa..0d88a23055 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -15,6 +15,7 @@ import pytest import iris +from iris.analysis import _Weights import iris.analysis.cartography import iris.analysis.maths import iris.coord_systems @@ -1706,23 +1707,33 @@ def test_weights_in_kwargs(self): class TestWeights: @pytest.fixture(autouse=True) def setup_test_data(self): + self.array_lib = np + self.target_type = np.ndarray + self.create_test_data() + + def create_test_data(self): + self.data = self.array_lib.arange(6).reshape(2, 3) self.lat = iris.coords.DimCoord( - [0, 1], standard_name="latitude", 
units="degrees" + self.array_lib.array([0, 1]), + standard_name="latitude", + units="degrees", ) self.lon = iris.coords.DimCoord( - [0, 1, 2], standard_name="longitude", units="degrees" + self.array_lib.array([0, 1, 2]), + standard_name="longitude", + units="degrees", ) self.cell_measure = iris.coords.CellMeasure( - np.arange(6).reshape(2, 3), standard_name="cell_area", units="m2" + self.data, standard_name="cell_area", units="m2" ) self.aux_coord = iris.coords.AuxCoord( - [3, 4], long_name="auxcoord", units="s" + self.array_lib.array([3, 4]), long_name="auxcoord", units="s" ) self.ancillary_variable = iris.coords.AncillaryVariable( - [5, 6, 7], var_name="ancvar", units="kg" + self.array_lib.array([5, 6, 7]), var_name="ancvar", units="kg" ) self.cube = iris.cube.Cube( - np.arange(6).reshape(2, 3), + self.data, standard_name="air_temperature", units="K", dim_coords_and_dims=[(self.lat, 0), (self.lon, 1)], @@ -1731,188 +1742,95 @@ def setup_test_data(self): ancillary_variables_and_dims=[(self.ancillary_variable, 1)], ) - def test_init_with_weights(self): - weights = iris.analysis._Weights([], self.cube) - new_weights = iris.analysis._Weights(weights, self.cube) - assert isinstance(new_weights, iris.analysis._Weights) - assert new_weights is not weights - np.testing.assert_array_equal(new_weights, []) - assert new_weights.units == "1" - assert weights.units == "1" - - def test_init_with_weights_and_units(self): - weights = iris.analysis._Weights([], self.cube) - new_weights = iris.analysis._Weights(weights, self.cube, units="J") - assert isinstance(new_weights, iris.analysis._Weights) - assert new_weights is not weights - np.testing.assert_array_equal(new_weights, []) - assert new_weights.units == "J" + def test_init_with_array(self): + weights = _Weights(self.data, self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + assert weights.array is self.data assert weights.units == "1" def 
test_init_with_cube(self): - weights = iris.analysis._Weights(self.cube, self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + weights = _Weights(self.cube, self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + assert weights.array is self.data assert weights.units == "K" - def test_init_with_cube_and_units(self): - weights = iris.analysis._Weights(self.cube, self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) - assert weights.units == "J" - def test_init_with_str_dim_coord(self): - weights = iris.analysis._Weights("latitude", self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) + weights = _Weights("latitude", self.cube) + # DimCoord always realizes points + assert isinstance(weights.array, np.ndarray) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, [[0, 0, 0], [1, 1, 1]]) assert weights.units == "degrees" - def test_init_with_str_dim_coord_and_units(self): - weights = iris.analysis._Weights("latitude", self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) - assert weights.units == "J" - def test_init_with_str_aux_coord(self): - weights = iris.analysis._Weights("auxcoord", self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) + weights = _Weights("auxcoord", self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) assert weights.units == "s" - def test_init_with_str_aux_coord_and_units(self): - weights = 
iris.analysis._Weights("auxcoord", self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) - assert weights.units == "J" - def test_init_with_str_ancillary_variable(self): - weights = iris.analysis._Weights("ancvar", self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) + weights = _Weights("ancvar", self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) assert weights.units == "kg" - def test_init_with_str_ancillary_variable_and_units(self): - weights = iris.analysis._Weights("ancvar", self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) - assert weights.units == "J" - def test_init_with_str_cell_measure(self): - weights = iris.analysis._Weights("cell_area", self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + weights = _Weights("cell_area", self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, self.data) assert weights.units == "m2" - def test_init_with_str_cell_measure_and_units(self): - weights = iris.analysis._Weights("cell_area", self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) - assert weights.units == "J" - def test_init_with_dim_coord(self): - weights = iris.analysis._Weights(self.lat, self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) + weights = _Weights(self.lat, self.cube) + # DimCoord always realizes points + assert 
isinstance(weights.array, np.ndarray) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, [[0, 0, 0], [1, 1, 1]]) assert weights.units == "degrees" - def test_init_with_dim_coord_and_units(self): - weights = iris.analysis._Weights(self.lat, self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[0, 0, 0], [1, 1, 1]]) - assert weights.units == "J" - def test_init_with_aux_coord(self): - weights = iris.analysis._Weights(self.aux_coord, self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) + weights = _Weights(self.aux_coord, self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) assert weights.units == "s" - def test_init_with_aux_coord_and_units(self): - weights = iris.analysis._Weights(self.aux_coord, self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[3, 3, 3], [4, 4, 4]]) - assert weights.units == "J" - def test_init_with_ancillary_variable(self): - weights = iris.analysis._Weights(self.ancillary_variable, self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) + weights = _Weights(self.ancillary_variable, self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) assert weights.units == "kg" - def test_init_with_ancillary_variable_and_units(self): - weights = iris.analysis._Weights( - self.ancillary_variable, self.cube, units="J" - ) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [[5, 6, 7], [5, 6, 7]]) - assert weights.units == "J" - def 
test_init_with_cell_measure(self): - weights = iris.analysis._Weights(self.cell_measure, self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) + weights = _Weights(self.cell_measure, self.cube) + assert isinstance(weights.array, self.target_type) + assert isinstance(weights.units, cf_units.Unit) + np.testing.assert_array_equal(weights.array, self.data) assert weights.units == "m2" - def test_init_with_cell_measure_and_units(self): - weights = iris.analysis._Weights( - self.cell_measure, self.cube, units="J" - ) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.arange(6).reshape(2, 3)) - assert weights.units == "J" - def test_init_with_list(self): - weights = iris.analysis._Weights([1, 2, 3], self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [1, 2, 3]) - assert weights.units == "1" - - def test_init_with_list_and_units(self): - weights = iris.analysis._Weights([1, 2, 3], self.cube, units="J") - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, [1, 2, 3]) - assert weights.units == "J" - - def test_init_with_ndarray(self): - weights = iris.analysis._Weights(np.zeros((5, 5)), self.cube) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.zeros((5, 5))) + list_in = [0, 1, 2] + weights = _Weights(list_in, self.cube) + assert isinstance(weights.array, list) + assert isinstance(weights.units, cf_units.Unit) + assert weights.array is list_in assert weights.units == "1" - def test_init_with_ndarray_and_units(self): - weights = iris.analysis._Weights( - np.zeros((5, 5)), self.cube, units="J" - ) - assert isinstance(weights, iris.analysis._Weights) - np.testing.assert_array_equal(weights, np.zeros((5, 5))) - assert weights.units == "J" - - def test_init_with_invalid_obj(self): - with pytest.raises(KeyError): - 
iris.analysis._Weights("invalid_obj", self.cube) - - def test_init_with_invalid_obj_and_units(self): - with pytest.raises(KeyError): - iris.analysis._Weights("invalid_obj", self.cube, units="J") - def test_update_kwargs_no_weights(self): - kwargs = {"test": [1, 2, 3]} - iris.analysis._Weights.update_kwargs(kwargs, self.cube) - assert kwargs == {"test": [1, 2, 3]} +class TestWeightsLazy(TestWeights): + """Repeat tests from ``TestWeights`` with lazy arrays.""" - def test_update_kwargs_weights_none(self): - kwargs = {"test": [1, 2, 3], "weights": None} - iris.analysis._Weights.update_kwargs(kwargs, self.cube) - assert kwargs == {"test": [1, 2, 3], "weights": None} - - def test_update_kwargs_weights(self): - kwargs = {"test": [1, 2, 3], "weights": [1, 2]} - iris.analysis._Weights.update_kwargs(kwargs, self.cube) - assert len(kwargs) == 2 - assert kwargs["test"] == [1, 2, 3] - assert isinstance(kwargs["weights"], iris.analysis._Weights) - np.testing.assert_array_equal(kwargs["weights"], [1, 2]) - assert kwargs["weights"].units == "1" + @pytest.fixture(autouse=True) + def setup_test_data(self): + self.array_lib = da + self.target_type = da.core.Array + self.create_test_data() def test__Groupby_repr(): @@ -1926,25 +1844,37 @@ def test__Groupby_repr(): assert repr(grouper) == "_Groupby(['year'], shared_coords=['time'])" -CUBE = iris.cube.Cube(0) - - @pytest.mark.parametrize( "kwargs,expected", [ - ({}, "s"), - ({"test": "m"}, "s"), - ({"weights": None}, "s"), - ({"weights": [1, 2, 3]}, "s"), - ({"weights": iris.analysis._Weights([1], CUBE)}, "s"), - ({"weights": iris.analysis._Weights([1], CUBE, units="kg")}, "s kg"), + ({}, "kg m-2"), + ({"test": "m"}, "kg m-2"), + ({"weights": None}, "kg m-2"), + ({"weights": [1, 2, 3]}, "kg m-2"), + ({"_weights_units": None}, "kg m-2"), + ({"test": "m", "_weights_units": None}, "kg m-2"), + ({"weights": None, "_weights_units": None}, "kg m-2"), + ({"weights": [1, 2, 3], "_weights_units": None}, "kg m-2"), + ({"_weights_units": "1"}, "kg 
m-2"), + ({"test": "m", "_weights_units": "1"}, "kg m-2"), + ({"weights": None, "_weights_units": "1"}, "kg m-2"), + ({"weights": [1, 2, 3], "_weights_units": "1"}, "kg m-2"), + ({"_weights_units": "s"}, "kg m-2"), + ({"test": "m", "_weights_units": "s"}, "kg m-2"), + ({"weights": None, "_weights_units": "s"}, "kg m-2"), + ({"weights": [1, 2, 3], "_weights_units": "s"}, "kg m-2 s"), ], ) def test_sum_units_func(kwargs, expected): - units = cf_units.Unit("s") + units = cf_units.Unit("kg m-2") result = iris.analysis._sum_units_func(units, **kwargs) assert result == expected + # Make sure that the units' string representation (= origin) has not + # changed if the units have not changed (even when weights units are "1") + if result == units: + assert result.origin == expected + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 28fbe429c1..8084ab31fa 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -367,14 +367,13 @@ def setUp(self): self.data = np.arange(6.0).reshape((2, 3)) self.lazydata = as_lazy_data(self.data) # Test cubes with (same-valued) real and lazy data - cube_real = Cube(self.data, units="m") + cube_real = Cube(self.data, units="kg m-2 s-1") for i_dim, name in enumerate(("y", "x")): npts = cube_real.shape[i_dim] coord = DimCoord(np.arange(npts), long_name=name) cube_real.add_dim_coord(coord, i_dim) self.cube_real = cube_real self.cube_lazy = cube_real.copy(data=self.lazydata) - self.cube_lazy.units = "kg" # Test weights and expected result for a y-collapse self.y_weights = np.array([0.3, 0.5]) self.full_weights_y = np.broadcast_to( @@ -396,7 +395,8 @@ def test_weighted_fullweights_real_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) - self.assertEqual(cube_collapsed.units, "m") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 
s-1") def test_weighted_fullweights_lazy_y(self): # Full-shape weights, lazy data : Check lazy result, same values as real calc. @@ -407,7 +407,7 @@ def test_weighted_fullweights_lazy_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) - self.assertEqual(cube_collapsed.units, "kg") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") def test_weighted_1dweights_real_y(self): # 1-D weights, real data : Check same results as full-shape. @@ -417,7 +417,8 @@ def test_weighted_1dweights_real_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) - self.assertEqual(cube_collapsed.units, "m") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_1dweights_lazy_y(self): # 1-D weights, lazy data : Check lazy result, same values as real calc. @@ -428,7 +429,7 @@ def test_weighted_1dweights_lazy_y(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_y ) - self.assertEqual(cube_collapsed.units, "kg") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") def test_weighted_fullweights_real_x(self): # Full weights, real data, ** collapse X ** : as for 'y' case above @@ -438,7 +439,8 @@ def test_weighted_fullweights_real_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) - self.assertEqual(cube_collapsed.units, "m") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_fullweights_lazy_x(self): # Full weights, lazy data, ** collapse X ** : as for 'y' case above @@ -449,7 +451,8 @@ def test_weighted_fullweights_lazy_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) - self.assertEqual(cube_collapsed.units, "kg") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_1dweights_real_x(self): # 
1-D weights, real data, ** collapse X ** : as for 'y' case above @@ -459,7 +462,8 @@ def test_weighted_1dweights_real_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) - self.assertEqual(cube_collapsed.units, "m") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_1dweights_lazy_x(self): # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above @@ -470,30 +474,34 @@ def test_weighted_1dweights_lazy_x(self): self.assertArrayAlmostEqual( cube_collapsed.data, self.expected_result_x ) - self.assertEqual(cube_collapsed.units, "kg") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_sum_fullweights_adapt_units_real_y(self): - # Check that units are adapted correctly ('m' * '1' = 'm') + # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) cube_collapsed = self.cube_real.collapsed( "y", SUM, weights=self.full_weights_y ) - self.assertEqual(cube_collapsed.units, "m") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_sum_fullweights_adapt_units_lazy_y(self): - # Check that units are adapted correctly ('kg' * '1' = 'kg') + # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) cube_collapsed = self.cube_lazy.collapsed( "y", SUM, weights=self.full_weights_y ) - self.assertEqual(cube_collapsed.units, "kg") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_sum_1dweights_adapt_units_real_y(self): - # Check that units are adapted correctly ('m' * '1' = 'm') + # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) # Note: the same test with lazy data fails: # https://github.com/SciTools/iris/issues/5083 cube_collapsed = self.cube_real.collapsed( "y", SUM, 
weights=self.y_weights ) - self.assertEqual(cube_collapsed.units, "m") + self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_sum_with_unknown_units_real_y(self): # Check that units are adapted correctly ('unknown' * '1' = 'unknown') @@ -543,27 +551,27 @@ def setUp(self): self.full_weights_x = self.cube_real.copy(self.full_weights_x_original) def test_weighted_sum_fullweights_adapt_units_real_y(self): - # Check that units are adapted correctly ('m' * 'm2' = 'm3') + # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) cube_collapsed = self.cube_real.collapsed( "y", SUM, weights=self.full_weights_y ) - self.assertEqual(cube_collapsed.units, "m3") + self.assertEqual(cube_collapsed.units, "kg s-1") def test_weighted_sum_fullweights_adapt_units_lazy_y(self): - # Check that units are adapted correctly ('kg' * 'm2' = 'kg m2') + # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) cube_collapsed = self.cube_lazy.collapsed( "y", SUM, weights=self.full_weights_y ) - self.assertEqual(cube_collapsed.units, "kg m2") + self.assertEqual(cube_collapsed.units, "kg s-1") def test_weighted_sum_1dweights_adapt_units_real_y(self): - # Check that units are adapted correctly ('m' * 'm2' = 'm3') + # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) # Note: the same test with lazy data fails: # https://github.com/SciTools/iris/issues/5083 cube_collapsed = self.cube_real.collapsed( "y", SUM, weights=self.y_weights ) - self.assertEqual(cube_collapsed.units, "m3") + self.assertEqual(cube_collapsed.units, "kg s-1") class Test_collapsed__multidim_weighted_with_str( @@ -988,7 +996,7 @@ def test_ancillary_variables_and_cell_measures_removed(self): self.assertEqual(res_cube.cell_measures(), []) def test_weights_arr(self): - weights = [0, 0, 1, 0, 2] + weights = np.array([0, 0, 1, 0, 2]) res_cube = self.cube.rolling_window("val", SUM, 5, weights=weights) 
np.testing.assert_array_equal(res_cube.data, [10, 13]) self.assertEqual(res_cube.units, "kg") From d6c6c99bc31bf76f118ecd471bb75be525d00fd0 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 26 Jun 2023 15:53:02 +0100 Subject: [PATCH 015/134] update whatsnew patch release version and date (#5360) --- docs/src/whatsnew/3.6.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/3.6.rst b/docs/src/whatsnew/3.6.rst index 892da3310d..389356df46 100644 --- a/docs/src/whatsnew/3.6.rst +++ b/docs/src/whatsnew/3.6.rst @@ -55,10 +55,10 @@ This document explains the changes made to Iris for this release or feature requests for improving Iris. Enjoy! -|iris_version| |build_date| -=========================== +v3.6.1 (26 June 2023) +===================== -.. dropdown:: |iris_version| Patches +.. dropdown:: v3.6.1 Patches :color: primary :icon: alert :animate: fade-in From 5b42f47e71fbeb7861a9df59c8bd8c0be9a340e3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 11 Jul 2023 09:45:55 +0100 Subject: [PATCH 016/134] [pre-commit.ci] pre-commit autoupdate (#5374) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/blacken-docs: 1.14.0 → 1.15.0](https://github.com/asottile/blacken-docs/compare/1.14.0...1.15.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 500f69134f..63c7839dc5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,7 +56,7 @@ repos: args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: 1.14.0 + rev: 1.15.0 hooks: - id: blacken-docs types: [file, rst] From bc17f434f1f639eaf0e7e967b9f6930c3d3d2576 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Wed, 19 Jul 2023 
14:49:37 +0100 Subject: [PATCH 017/134] Ignore sticklerci (#5381) --- docs/src/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/conf.py b/docs/src/conf.py index a204263a24..d7c04f2130 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -384,6 +384,7 @@ def _dotv(version): "http://www.nationalarchives.gov.uk/doc/open-government-licence", "https://www.metoffice.gov.uk/", "https://biggus.readthedocs.io/", + "https://stickler-ci.com/", ] # list of sources to exclude from the build. From 2a9e131875be42c39ab4b8fc9956e4a4d59f83a2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 20 Jul 2023 12:11:21 +0100 Subject: [PATCH 018/134] [pre-commit.ci] pre-commit autoupdate (#5380) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.3.0 → 23.7.0](https://github.com/psf/black/compare/23.3.0...23.7.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 63c7839dc5..9dc69d2649 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.7.0 hooks: - id: black pass_filenames: false From 66ecb4c33bb0676320c3d68a6e95e9412da9f6b7 Mon Sep 17 00:00:00 2001 From: Alex Chamberlain-Clay <68277260+acchamber@users.noreply.github.com> Date: Wed, 26 Jul 2023 11:14:48 +0100 Subject: [PATCH 019/134] Bugfix for #3696 (#5382) * first draft of fix with comments to be removed * Removed comments, added warning * responded to pull request * Updated tests and test warnings * fixed dtypes on bounds tests * Updated "what's new" docs section * added link to github 
page --------- Co-authored-by: alex.chamberlain-clay --- docs/src/whatsnew/latest.rst | 5 +++- lib/iris/_concatenate.py | 7 +++++ lib/iris/tests/test_concatenate.py | 29 +++++++++++++++---- .../unit/concatenate/test_concatenate.py | 8 +++++ 4 files changed, 42 insertions(+), 7 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 9b62715be6..7bdb55a58a 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -32,6 +32,9 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) + +#. `@acchamber`_ added error and warning messages about coordinate overlaps to + :func:`~iris.cube.concatenate` to improve the concatenation process. (:pull:`5382`) 🐛 Bugs Fixed @@ -89,7 +92,7 @@ This document explains the changes made to Iris for this release Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: .. _@rsdavies: https://github.com/rsdavies - +.. _@acchamber: https://github.com/acchamber .. comment diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 7bb27eaf83..c6d58b1622 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -9,6 +9,7 @@ """ from collections import defaultdict, namedtuple +import warnings import dask.array as da import numpy as np @@ -992,6 +993,12 @@ def register( match = self._sequence( coord_signature.dim_extents[dim_ind], candidate_axis ) + if error_on_mismatch and not match: + msg = f"Found cubes with overlap on concatenate axis {candidate_axis}, cannot concatenate overlapping cubes" + raise iris.exceptions.ConcatenateError([msg]) + elif not match: + msg = f"Found cubes with overlap on concatenate axis {candidate_axis}, skipping concatenation for these cubes" + warnings.warn(msg) # Check for compatible AuxCoords. 
if match: diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index 7cb11189d6..9287a79fda 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -15,6 +15,7 @@ import dask.array as da import numpy as np import numpy.ma as ma +import pytest from iris.aux_factory import HybridHeightFactory from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord @@ -338,7 +339,10 @@ def test_points_overlap_increasing(self): y = (0, 2) cubes.append(_make_cube((0, 2), y, 1)) cubes.append(_make_cube((1, 3), y, 2)) - result = concatenate(cubes) + with pytest.warns( + UserWarning, match="Found cubes with overlap on concatenate axis" + ): + result = concatenate(cubes) self.assertEqual(len(result), 2) def test_points_overlap_decreasing(self): @@ -346,7 +350,10 @@ def test_points_overlap_decreasing(self): x = (0, 2) cubes.append(_make_cube(x, (3, 0, -1), 1)) cubes.append(_make_cube(x, (1, -1, -1), 2)) - result = concatenate(cubes) + with pytest.warns( + UserWarning, match="Found cubes with overlap on concatenate axis" + ): + result = concatenate(cubes) self.assertEqual(len(result), 2) def test_bounds_overlap_increasing(self): @@ -354,9 +361,14 @@ def test_bounds_overlap_increasing(self): y = (0, 2) cubes.append(_make_cube((0, 2), y, 1)) cube = _make_cube((2, 4), y, 1) - cube.coord("x").bounds = np.array([[0.5, 2.5], [2.5, 3.5]]) + cube.coord("x").bounds = np.array( + [[0.5, 2.5], [2.5, 3.5]], dtype=np.float32 + ) cubes.append(cube) - result = concatenate(cubes) + with pytest.warns( + UserWarning, match="Found cubes with overlap on concatenate axis" + ): + result = concatenate(cubes) self.assertEqual(len(result), 2) def test_bounds_overlap_decreasing(self): @@ -364,9 +376,14 @@ def test_bounds_overlap_decreasing(self): y = (0, 2) cubes.append(_make_cube((3, 1, -1), y, 1)) cube = _make_cube((1, -1, -1), y, 2) - cube.coord("x").bounds = np.array([[2.5, 0.5], [0.5, -0.5]]) + cube.coord("x").bounds = 
np.array( + [[2.5, 0.5], [0.5, -0.5]], dtype=np.float32 + ) cubes.append(cube) - result = concatenate(cubes) + with pytest.warns( + UserWarning, match="Found cubes with overlap on concatenate axis" + ): + result = concatenate(cubes) self.assertEqual(len(result), 2) def test_scalar_difference(self): diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index bb3770cf0f..c2ca01f781 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -242,6 +242,14 @@ def test_datatype_difference_message(self): with self.assertRaisesRegex(ConcatenateError, exc_regexp): _ = concatenate([cube_1, cube_2], True) + def test_dim_coords_overlap_message(self): + cube_1 = self.cube + cube_2 = cube_1.copy() + cube_2.coord("time").points = np.arange(1, 3, dtype=np.float32) + exc_regexp = "Found cubes with overlap on concatenate axis" + with self.assertRaisesRegex(ConcatenateError, exc_regexp): + _ = concatenate([cube_1, cube_2], True) + class TestOrder(tests.IrisTest): def _make_cube(self, points, bounds=None): From 3c1b3e6993ba1283b5630ea24ee450d2bd7f246f Mon Sep 17 00:00:00 2001 From: Alex Chamberlain-Clay <68277260+acchamber@users.noreply.github.com> Date: Wed, 26 Jul 2023 13:57:23 +0100 Subject: [PATCH 020/134] Added link to UDUNITS-2 database to convert_units docstrings (#5388) Co-authored-by: alex.chamberlain-clay --- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/coords.py | 2 ++ lib/iris/cube.py | 3 +++ 3 files changed, 9 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7bdb55a58a..d7b0843415 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -78,6 +78,9 @@ This document explains the changes made to Iris for this release section into the user guide, containing advice and use cases to help users get the best out of Dask with Iris. +#. 
`@acchamber`_ improved documentation for :meth:`~iris.cube.Cube.convert_units` + and :meth:`~iris.coords.Coord.convert_units` by including a link to the UDUNITS-2 + documentation which contains lists of compatible units and aliases for them. 💼 Internal =========== @@ -95,5 +98,6 @@ This document explains the changes made to Iris for this release .. _@acchamber: https://github.com/acchamber + .. comment Whatsnew resources in alphabetical order: diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 63bc524637..1f00e10840 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -1862,6 +1862,8 @@ def convert_units(self, unit): multiply each value in :attr:`~iris.coords.Coord.points` and :attr:`~iris.coords.Coord.bounds` by 180.0/:math:`\pi`. + Full list of supported units can be found in the UDUNITS-2 documentation + https://docs.unidata.ucar.edu/udunits/current/#Database """ super().convert_units(unit=unit) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index ffc2cf96e2..a21844241f 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1134,6 +1134,9 @@ def convert_units(self, unit): celsius and subtract 273.15 from each value in :attr:`~iris.cube.Cube.data`. + Full list of supported units can be found in the UDUNITS-2 documentation + https://docs.unidata.ucar.edu/udunits/current/#Database + This operation preserves lazy data. """ From e66a0c86f964c61ac9a91f6c2de15bc67b900d5f Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 31 Jul 2023 12:20:49 +0100 Subject: [PATCH 021/134] Include location coordinates in the data variable coordinates attribute (#5389) * Include UGRID location coordinates on variable when saving. * Adjustments to tests. * What's New entry. * Adjustment to doctest. * Add test for mixed aux coords. 
--- docs/src/further_topics/ugrid/operations.rst | 2 + docs/src/whatsnew/latest.rst | 5 +++ lib/iris/fileformats/netcdf/saver.py | 24 ++++++++++- .../TestBasicSave/ugrid_ex1_1d_mesh.cdl | 1 + .../TestBasicSave/ugrid_ex2_2d_triangular.cdl | 1 + .../TestBasicSave/ugrid_ex3_2d_flexible.cdl | 1 + .../TestBasicSave/ugrid_ex4_3d_layered.cdl | 2 +- .../TestSaveUgrid__cube/basic_mesh.cdl | 1 + .../netcdf/saver/test_Saver__ugrid.py | 42 ++++++++++++++++++- 9 files changed, 74 insertions(+), 5 deletions(-) diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst index f0638800fa..a088b588e1 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/further_topics/ugrid/operations.rst @@ -267,6 +267,7 @@ saves the file in a UGRID-conformant format: edge_data:units = "K" ; edge_data:mesh = "my_mesh" ; edge_data:location = "edge" ; + edge_data:coordinates = "latitude_0 longitude_0" ; int64 height(height) ; height:standard_name = "height" ; int64 face_data(Mesh2d_face, height) ; @@ -274,6 +275,7 @@ saves the file in a UGRID-conformant format: face_data:units = "K" ; face_data:mesh = "my_mesh" ; face_data:location = "face" ; + face_data:coordinates = "latitude_1 longitude_1" ; // global attributes: :Conventions = "CF-1.7" ; diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index d7b0843415..3e51198ca3 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -36,6 +36,11 @@ This document explains the changes made to Iris for this release #. `@acchamber`_ added error and warning messages about coordinate overlaps to :func:`~iris.cube.concatenate` to improve the concatenation process. (:pull:`5382`) +#. `@trexfeathers`_ included mesh location coordinates + (e.g. :attr:`~iris.experimental.ugrid.Mesh.face_coords`) in + the data variable's ``coordinates`` attribute when saving to NetCDF. 
+ (:issue:`5206`, :pull:`5389`) + 🐛 Bugs Fixed ============= diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 312eea9c43..c0cfd3d10b 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -895,6 +895,9 @@ def _add_mesh(self, cube_or_mesh): coord, element_dims=(mesh_dims[location],), ) + # Only created once per file, but need to fetch the + # name later in _add_inner_related_vars(). + self._name_coord_map.append(coord_name, coord) coord_names.append(coord_name) # Record the coordinates (if any) on the mesh variable. if coord_names: @@ -1024,15 +1027,32 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names): Names associated with the dimensions of the cube. """ + from iris.experimental.ugrid.mesh import ( + Mesh, + MeshEdgeCoords, + MeshFaceCoords, + MeshNodeCoords, + ) + # Exclude any mesh coords, which are bundled in with the aux-coords. - aux_coords_no_mesh = [ + coords_to_add = [ coord for coord in cube.aux_coords if not hasattr(coord, "mesh") ] + + # Include any relevant mesh location coordinates. 
+ mesh: Mesh = getattr(cube, "mesh") + mesh_location: str = getattr(cube, "location") + if mesh and mesh_location: + location_coords: MeshNodeCoords | MeshEdgeCoords | MeshFaceCoords = getattr( + mesh, f"{mesh_location}_coords" + ) + coords_to_add.extend(list(location_coords)) + return self._add_inner_related_vars( cube, cf_var_cube, dimension_names, - aux_coords_no_mesh, + coords_to_add, ) def _add_cell_measures(self, cube, cf_var_cube, dimension_names): diff --git a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex1_1d_mesh.cdl b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex1_1d_mesh.cdl index 517991a17a..9a9ae5627d 100644 --- a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex1_1d_mesh.cdl +++ b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex1_1d_mesh.cdl @@ -33,6 +33,7 @@ variables: float datavar(nMesh1_edge) ; datavar:mesh = "Mesh1" ; datavar:location = "edge" ; + datavar:coordinates = "Mesh1_edge_x Mesh1_edge_y" ; // global attributes: :Conventions = "CF-1.7" ; diff --git a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex2_2d_triangular.cdl b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex2_2d_triangular.cdl index 5d01a263d6..761ca2e98f 100644 --- a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex2_2d_triangular.cdl +++ b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex2_2d_triangular.cdl @@ -69,6 +69,7 @@ variables: float datavar(nMesh2_face) ; datavar:mesh = "Mesh2" ; datavar:location = "face" ; + datavar:coordinates = "Mesh2_face_x Mesh2_face_y" ; // global attributes: :Conventions = "CF-1.7" ; diff --git a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex3_2d_flexible.cdl 
b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex3_2d_flexible.cdl index 355799e0d8..aa5879867d 100644 --- a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex3_2d_flexible.cdl +++ b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex3_2d_flexible.cdl @@ -69,6 +69,7 @@ variables: float datavar(nMesh2_face) ; datavar:mesh = "Mesh2" ; datavar:location = "face" ; + datavar:coordinates = "Mesh2_face_x Mesh2_face_y" ; // global attributes: :Conventions = "CF-1.7" ; diff --git a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex4_3d_layered.cdl b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex4_3d_layered.cdl index 64962e79aa..173dd9462c 100644 --- a/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex4_3d_layered.cdl +++ b/lib/iris/tests/results/integration/experimental/ugrid_save/TestBasicSave/ugrid_ex4_3d_layered.cdl @@ -70,7 +70,7 @@ variables: float datavar(Mesh2_layers, nMesh2_face) ; datavar:mesh = "Mesh2" ; datavar:location = "face" ; - datavar:coordinates = "Mesh2_depth Mesh2_surface" ; + datavar:coordinates = "Mesh2_depth Mesh2_face_x Mesh2_face_y Mesh2_surface" ; double Mesh2_layers(Mesh2_layers) ; Mesh2_layers:axis = "Z" ; Mesh2_layers:units = "1" ; diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl index 91516ddae3..6bb87f3a05 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl @@ -23,6 +23,7 @@ variables: float unknown(Mesh2d_faces) ; unknown:mesh = "Mesh2d" ; unknown:location = "face" ; + unknown:coordinates = "face_x face_y" ; // global 
attributes: :Conventions = "CF-1.7" ; diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 323b498d9c..27d9709fe6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -403,6 +403,10 @@ def test_basic_mesh(self): all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords) ) + # The face coordinates should be referenced by the data variable. + for coord in face_coords: + self.assertIn(coord, a_props["coordinates"]) + # The dims of the datavar also == [] self.assertEqual(a_props[_VAR_DIMS], [face_dim]) @@ -460,8 +464,10 @@ def test_multi_cubes_common_mesh(self): v_a, v_b = vars["a"], vars["b"] self.assertEqual(v_a["mesh"], mesh_name) self.assertEqual(v_a["location"], "face") + self.assertEqual(v_a["coordinates"], "face_x face_y") self.assertEqual(v_b["mesh"], mesh_name) self.assertEqual(v_b["location"], "face") + self.assertEqual(v_b["coordinates"], "face_x face_y") def test_multi_cubes_different_locations(self): cube1 = make_cube(var_name="a", location="face") @@ -478,8 +484,10 @@ def test_multi_cubes_different_locations(self): v_a, v_b = vars["a"], vars["b"] self.assertEqual(v_a["mesh"], mesh_name) self.assertEqual(v_a["location"], "face") + self.assertEqual(v_a["coordinates"], "face_x face_y") self.assertEqual(v_b["mesh"], mesh_name) self.assertEqual(v_b["location"], "node") + self.assertEqual(v_b["coordinates"], "node_x node_y") # the main variables map the face and node dimensions face_dim = vars_meshdim(vars, "face") @@ -520,6 +528,7 @@ def test_multi_cubes_equal_meshes(self): for props in a_props, b_props: self.assertEqual(props["mesh"], "Mesh2d") self.assertEqual(props["location"], "face") + self.assertEqual(props["coordinates"], "face_x face_y") # the data variables map the appropriate node dimensions self.assertEqual(a_props[_VAR_DIMS], ["Mesh2d_faces"]) @@ 
-543,11 +552,15 @@ def test_multi_cubes_different_mesh(self): self.assertEqual(2, len(mesh_datavars)) self.assertEqual(["a", "b"], sorted(mesh_datavars.keys())) + def get_props_attrs(props: dict): + return props["mesh"], props["location"], props["coordinates"] + # the main variables reference the correct meshes, and 'face' location a_props, b_props = vars["a"], vars["b"] - mesh_a, loc_a = a_props["mesh"], a_props["location"] - mesh_b, loc_b = b_props["mesh"], b_props["location"] + mesh_a, loc_a, coords_a = get_props_attrs(a_props) + mesh_b, loc_b, coords_b = get_props_attrs(b_props) self.assertNotEqual(mesh_a, mesh_b) + self.assertNotEqual(coords_a, coords_b) self.assertEqual(loc_a, "face") self.assertEqual(loc_b, "face") @@ -664,6 +677,31 @@ def test_alternate_cube_dim_order(self): self.assertEqual(v_a[_VAR_DIMS], ["height", "Mesh2d_faces"]) self.assertEqual(v_b[_VAR_DIMS], ["Mesh2d_faces", "height"]) + def test_mixed_aux_coords(self): + """ + ``coordinates`` attribute should include mesh location coords and 'normal' coords. 
+ """ + + cube = make_cube() + mesh_dim = cube.mesh_dim() + mesh_len = cube.shape[mesh_dim] + coord = AuxCoord(np.arange(mesh_len), var_name="face_index") + cube.add_aux_coord(coord, mesh_dim) + + # Save and snapshot the result + tempfile_path = self.check_save_cubes(cube) + dims, vars = scan_dataset(tempfile_path) + + # There is exactly 1 mesh-linked (data)var + data_vars = vars_w_props(vars, mesh="*") + ((_, a_props),) = data_vars.items() + + expected_coords = [c for c in cube.mesh.face_coords] + expected_coords.append(coord) + expected_coord_names = [c.var_name for c in expected_coords] + expected_coord_attr = " ".join(sorted(expected_coord_names)) + self.assertEqual(a_props["coordinates"], expected_coord_attr) + class TestSaveUgrid__mesh(tests.IrisTest): """Tests for saving meshes to a file.""" From 1efa4bff101c4bb0431848e6a113af4e3be14efb Mon Sep 17 00:00:00 2001 From: Alex Chamberlain-Clay <68277260+acchamber@users.noreply.github.com> Date: Mon, 31 Jul 2023 14:36:09 +0100 Subject: [PATCH 022/134] =?UTF-8?q?removed=20redundant=20timeattr=20check?= =?UTF-8?q?=20and=20added=20tests=20to=20ensure=20datetime/c=E2=80=A6=20(#?= =?UTF-8?q?5396)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * removed redundant timeattr check and added tests to ensure datetime/cftime.datetime/partialdatetime play nicely. 
* responded to review at #5396 and added test/changes to common_cmp * responces to review * updated whats new --------- Co-authored-by: alex.chamberlain-clay --- docs/src/whatsnew/latest.rst | 4 +- lib/iris/coords.py | 14 -- lib/iris/tests/unit/coords/test_Cell.py | 163 +++++++++++++++++++----- 3 files changed, 134 insertions(+), 47 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 3e51198ca3..a16ce33172 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -45,7 +45,8 @@ This document explains the changes made to Iris for this release 🐛 Bugs Fixed ============= -#. N/A +#. `@acchamber`_ removed some obsolete code that prevented extraction of time points + from cubes with bounded times (:pull:`5175`) 💣 Incompatible Changes @@ -96,6 +97,7 @@ This document explains the changes made to Iris for this release (:pull:`5214`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 1f00e10840..1a6e8d4e6a 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -1464,12 +1464,6 @@ def __common_cmp__(self, other, operator_method): # - Simple matching me = self.point else: - if hasattr(other, "timetuple"): - raise TypeError( - "Cannot determine whether a point lies " - "within a bounded region for " - "datetime-like objects." - ) # Point-and-bound vs number # - Match if "within" the Cell if operator_method in [operator.gt, operator.le]: @@ -1510,14 +1504,6 @@ def contains_point(self, point): """ if self.bound is None: raise ValueError("Point cannot exist inside an unbounded cell.") - if hasattr(point, "timetuple") or np.any( - [hasattr(val, "timetuple") for val in self.bound] - ): - raise TypeError( - "Cannot determine whether a point lies within " - "a bounded region for datetime-like objects." 
- ) - return np.min(self.bound) <= point <= np.max(self.bound) diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index d191993d51..2408ec9f36 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -10,7 +10,6 @@ import iris.tests as tests # isort:skip import datetime -from unittest import mock import cftime import numpy as np @@ -31,9 +30,7 @@ def assert_raises_on_comparison(self, cell, other, exception_type, regexp): cell >= other def test_PartialDateTime_bounded_cell(self): - # Check that bounded comparisons to a PartialDateTime - # raise an exception. These are not supported as they - # depend on the calendar. + # Check bounded cell comparisons to a PartialDateTime dt = PartialDateTime(month=6) cell = Cell( datetime.datetime(2010, 1, 1), @@ -42,8 +39,23 @@ def test_PartialDateTime_bounded_cell(self): datetime.datetime(2011, 1, 1), ], ) + self.assertGreater(dt, cell) + self.assertGreaterEqual(dt, cell) + self.assertLess(cell, dt) + self.assertLessEqual(cell, dt) + + def test_cftime_calender_bounded_cell(self): + # Check that cell comparisons fail with different calendars + dt = cftime.datetime(2010, 3, 1, calendar="360_day") + cell = Cell( + datetime.datetime(2010, 1, 1), + bound=[ + datetime.datetime(2010, 1, 1), + datetime.datetime(2011, 1, 1), + ], + ) self.assert_raises_on_comparison( - cell, dt, TypeError, "bounded region for datetime" + cell, dt, TypeError, "different calendars" ) def test_PartialDateTime_unbounded_cell(self): @@ -56,7 +68,7 @@ def test_PartialDateTime_unbounded_cell(self): self.assertGreaterEqual(dt, cell) def test_datetime_unbounded_cell(self): - # Check that cell comparison works with datetimes. + # Check that cell comparison works with datetimes & cftimes. 
dt = datetime.datetime(2000, 6, 15) cell = Cell(cftime.datetime(2000, 1, 1)) self.assertGreater(dt, cell) @@ -87,29 +99,40 @@ def test_len_1_numpy_array(self): class Test___eq__(tests.IrisTest): def test_datetimelike(self): - # Check that cell equality works with objects with a "timetuple". - dt = mock.Mock(timetuple=mock.Mock()) - cell = mock.MagicMock( - spec=Cell, point=datetime.datetime(2010, 3, 21), bound=None + # Check that cell equality works with different datetime objects + # using the same calendar + point = cftime.datetime(2010, 1, 1, calendar="gregorian") + cell = Cell( + datetime.datetime(2010, 1, 1), + bound=None, ) - _ = cell == dt - cell.__eq__.assert_called_once_with(dt) + self.assertEqual(cell, point) def test_datetimelike_bounded_cell(self): - # Check that equality with a datetime-like bounded cell - # raises an error. This is not supported as it - # depends on the calendar which is not always known from - # the datetime-like bound objects. - other = mock.Mock(timetuple=mock.Mock()) + # Check that cell equality works with bounded cells using different datetime objects + point = cftime.datetime(2010, 1, 1, calendar="gregorian") + cell = Cell( + datetime.datetime(2010, 1, 1), + bound=[ + datetime.datetime(2010, 1, 1), + datetime.datetime(2011, 1, 1), + ], + ) + self.assertEqual(cell, point) + + def test_datetimelike_calenders_cell(self): + # Check that equality with a cell with a different calendar + # raises an error. 
This is not supported + point = cftime.datetime(2010, 1, 1, calendar="360_day") cell = Cell( - point=object(), + datetime.datetime(2010, 1, 1), bound=[ - mock.Mock(timetuple=mock.Mock()), - mock.Mock(timetuple=mock.Mock()), + datetime.datetime(2010, 1, 1), + datetime.datetime(2011, 1, 1), ], ) - with self.assertRaisesRegex(TypeError, "bounded region for datetime"): - cell == other + with self.assertRaisesRegex(TypeError, "different calendars"): + cell >= point def test_PartialDateTime_other(self): cell = Cell(datetime.datetime(2010, 3, 2)) @@ -120,24 +143,100 @@ def test_PartialDateTime_other(self): class Test_contains_point(tests.IrisTest): - def test_datetimelike_bounded_cell(self): - point = object() + """ + Test that contains_point works for combinations of datetime, + cf.datatime, and PartialDateTime objects""" + + def test_datetime_PartialDateTime_point(self): + point = PartialDateTime(month=6) cell = Cell( - point=object(), + datetime.datetime(2010, 1, 1), bound=[ - mock.Mock(timetuple=mock.Mock()), - mock.Mock(timetuple=mock.Mock()), + datetime.datetime(2010, 1, 1), + datetime.datetime(2011, 1, 1), ], ) - with self.assertRaisesRegex(TypeError, "bounded region for datetime"): + self.assertFalse(cell.contains_point(point)) + + def test_datetime_cftime_standard_point(self): + point = cftime.datetime(2010, 6, 15) + cell = Cell( + datetime.datetime(2010, 1, 1), + bound=[ + datetime.datetime(2010, 1, 1), + datetime.datetime(2011, 1, 1), + ], + ) + self.assertTrue(cell.contains_point(point)) + + def test_datetime_cftime_360day_point(self): + point = cftime.datetime(2010, 6, 15, calendar="360_day") + cell = Cell( + datetime.datetime(2010, 1, 1), + bound=[ + datetime.datetime(2010, 1, 1), + datetime.datetime(2011, 1, 1), + ], + ) + with self.assertRaisesRegex(TypeError, "different calendars"): cell.contains_point(point) - def test_datetimelike_point(self): - point = mock.Mock(timetuple=mock.Mock()) - cell = Cell(point=object(), bound=[object(), object()]) - with 
self.assertRaisesRegex(TypeError, "bounded region for datetime"): + def test_cftime_standard_PartialDateTime_point(self): + point = PartialDateTime(month=6) + cell = Cell( + cftime.datetime(2010, 1, 1), + bound=[ + cftime.datetime(2010, 1, 1), + cftime.datetime(2011, 1, 1), + ], + ) + self.assertFalse(cell.contains_point(point)) + + def test_cftime_360day_PartialDateTime_point(self): + point = PartialDateTime(month=6) + cell = Cell( + cftime.datetime(2010, 1, 1, calendar="360_day"), + bound=[ + cftime.datetime(2010, 1, 1, calendar="360_day"), + cftime.datetime(2011, 1, 1, calendar="360_day"), + ], + ) + self.assertFalse(cell.contains_point(point)) + + def test_cftime_standard_datetime_point(self): + point = datetime.datetime(2010, 6, 1) + cell = Cell( + cftime.datetime(2010, 1, 1), + bound=[ + cftime.datetime(2010, 1, 1), + cftime.datetime(2011, 1, 1), + ], + ) + self.assertTrue(cell.contains_point(point)) + + def test_cftime_360day_datetime_point(self): + point = datetime.datetime(2010, 6, 1) + cell = Cell( + cftime.datetime(2010, 1, 1, calendar="360_day"), + bound=[ + cftime.datetime(2010, 1, 1, calendar="360_day"), + cftime.datetime(2011, 1, 1, calendar="360_day"), + ], + ) + with self.assertRaisesRegex(TypeError, "different calendars"): cell.contains_point(point) + def test_cftime_360_day_cftime_360day_point(self): + point = cftime.datetime(2010, 6, 15, calendar="360_day") + cell = Cell( + cftime.datetime(2010, 1, 1, calendar="360_day"), + bound=[ + cftime.datetime(2010, 1, 1, calendar="360_day"), + cftime.datetime(2011, 1, 1, calendar="360_day"), + ], + ) + self.assertTrue(cell.contains_point(point)) + class Test_numpy_comparison(tests.IrisTest): """ From ea2a2721101489d7c84f3b8437494ee5ed646d0a Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Mon, 31 Jul 2023 15:54:41 +0100 Subject: [PATCH 023/134] Avoid using deprecated ContourSet attributes (#5405) --- docs/src/whatsnew/latest.rst | 3 +++ lib/iris/plot.py | 26 
++++++++++++------- lib/iris/tests/unit/plot/test_contourf.py | 6 ++--- .../tests/unit/quickplot/test_contourf.py | 7 +++-- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a16ce33172..ce561ff9c4 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -96,6 +96,9 @@ This document explains the changes made to Iris for this release `"Xarray bridge" `_ facility. (:pull:`5214`) +#. `@rcomer`_ updated :func:`~iris.plot.contourf` to avoid using functionality + that is deprecated in Matplotlib v3.8 (:pull:`5405`) + .. comment diff --git a/lib/iris/plot.py b/lib/iris/plot.py index d319c1361b..ebcb5c3bcb 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -1155,16 +1155,26 @@ def contourf(cube, *args, **kwargs): # But if the polygons are virtually opaque then we can cover the seams # by drawing anti-aliased lines *underneath* the polygon joins. - # Figure out the alpha level for the contour plot - if result.alpha is None: - alpha = result.collections[0].get_facecolor()[0][3] + if hasattr(result, "get_antialiased"): + # Matplotlib v3.8 onwards. + antialiased = any(result.get_antialiased()) + # Figure out the colours and alpha level for the contour plot + colors = result.get_facecolor() + alpha = result.alpha or colors[0][3] + # Define a zorder just *below* the polygons to ensure we minimise any boundary shift. + zorder = result.zorder - 0.1 else: - alpha = result.alpha + antialiased = result.antialiased + # Figure out the alpha level for the contour plot + alpha = result.alpha or result.collections[0].get_facecolor()[0][3] + colors = [c[0] for c in result.tcolors] + # Define a zorder just *below* the polygons to ensure we minimise any boundary shift. + zorder = result.collections[0].zorder - 0.1 + # If the contours are anti-aliased and mostly opaque then draw lines under # the seams. 
- if result.antialiased and alpha > 0.95: + if antialiased and alpha > 0.95: levels = result.levels - colors = [c[0] for c in result.tcolors] if result.extend == "neither": levels = levels[1:-1] colors = colors[:-1] @@ -1177,11 +1187,7 @@ def contourf(cube, *args, **kwargs): else: colors = colors[:-1] if len(levels) > 0 and np.nanmax(cube.data) > levels[0]: - # Draw the lines just *below* the polygons to ensure we minimise - # any boundary shift. - zorder = result.collections[0].zorder - 0.1 axes = kwargs.get("axes", None) - contour( cube, levels=levels, diff --git a/lib/iris/tests/unit/plot/test_contourf.py b/lib/iris/tests/unit/plot/test_contourf.py index 0247fb5a91..de84e88a52 100644 --- a/lib/iris/tests/unit/plot/test_contourf.py +++ b/lib/iris/tests/unit/plot/test_contourf.py @@ -69,10 +69,8 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = self.cube.data self.dataT = self.data.T - mocker = mock.Mock(alpha=0, antialiased=False) - self.mpl_patch = self.patch( - "matplotlib.pyplot.contourf", return_value=mocker - ) + mocker = mock.Mock(wraps=plt.contourf) + self.mpl_patch = self.patch("matplotlib.pyplot.contourf", mocker) self.draw_func = iplt.contourf diff --git a/lib/iris/tests/unit/quickplot/test_contourf.py b/lib/iris/tests/unit/quickplot/test_contourf.py index 2624ebd08e..e510e661ae 100644 --- a/lib/iris/tests/unit/quickplot/test_contourf.py +++ b/lib/iris/tests/unit/quickplot/test_contourf.py @@ -11,6 +11,7 @@ from unittest import mock +import matplotlib.pyplot as plt import numpy as np from iris.tests.stock import simple_2d @@ -42,10 +43,8 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = self.cube.data self.dataT = self.data.T - mocker = mock.Mock(alpha=0, antialiased=False) - self.mpl_patch = self.patch( - "matplotlib.pyplot.contourf", return_value=mocker - ) + mocker = mock.Mock(wraps=plt.contourf) + self.mpl_patch = self.patch("matplotlib.pyplot.contourf", mocker) # Also need to mock the colorbar. 
self.patch("matplotlib.pyplot.colorbar") self.draw_func = qplt.contourf From b5f43fbd4ce22c8ed708c3d9c5769a927f689d56 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 12:54:52 +0100 Subject: [PATCH 024/134] [pre-commit.ci] pre-commit autoupdate (#5406) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/PyCQA/flake8: 6.0.0 → 6.1.0](https://github.com/PyCQA/flake8/compare/6.0.0...6.1.0) * Switch to isinstance(). (#5408) --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- lib/iris/_constraints.py | 10 +++++----- lib/iris/cube.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9dc69d2649..c641389768 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -43,7 +43,7 @@ repos: args: [--config=./pyproject.toml, .] - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 6.1.0 hooks: - id: flake8 types: [file, python] diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index bfd4865f56..1884cbcbd9 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -137,7 +137,7 @@ def __eq__(self, other): # attributes/names/coords : These can only be == if they contain the # *same* callable object (i.e. same object identity). 
eq = ( - type(other) == Constraint + isinstance(other, Constraint) and self._name == other._name and self._cube_func == other._cube_func and self._coord_constraints == other._coord_constraints @@ -244,7 +244,7 @@ def __init__(self, lhs, rhs, operator): def __eq__(self, other): eq = ( - type(other) == ConstraintCombination + isinstance(other, ConstraintCombination) and self.lhs == other.lhs and self.rhs == other.rhs and self.operator == other.operator @@ -300,7 +300,7 @@ def __repr__(self): def __eq__(self, other): eq = ( - type(other) == _CoordConstraint + isinstance(other, _CoordConstraint) and self.coord_name == other.coord_name and self._coord_thing == other._coord_thing ) @@ -544,7 +544,7 @@ def __init__(self, **attributes): def __eq__(self, other): eq = ( - type(other) == AttributeConstraint + isinstance(other, AttributeConstraint) and self._attributes == other._attributes ) return eq @@ -638,7 +638,7 @@ def __init__( super().__init__(cube_func=self._cube_func) def __eq__(self, other): - eq = type(other) == NameConstraint and all( + eq = isinstance(other, NameConstraint) and all( getattr(self, attname) == getattr(other, attname) for attname in self._names ) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index a21844241f..35e3a903c6 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -776,7 +776,7 @@ def copy(self): """ Return a CubeList when CubeList.copy() is called. 
""" - if type(self) == CubeList: + if isinstance(self, CubeList): return deepcopy(self) From 41bcbaa03440db3c0496182b35018a74987cd1bf Mon Sep 17 00:00:00 2001 From: Alex Chamberlain-Clay <68277260+acchamber@users.noreply.github.com> Date: Mon, 7 Aug 2023 14:13:56 +0100 Subject: [PATCH 025/134] Fix for #5372 (#5412) * Added conversion to dtypes for points and bounds in unify_time_units * Added whats new entry and updated docstring * changed docs in response to review --- docs/src/whatsnew/latest.rst | 3 ++ .../tests/unit/util/test_unify_time_units.py | 30 +++++++++++++++++++ lib/iris/util.py | 8 ++++- 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ce561ff9c4..9a9a18ccaa 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -45,6 +45,9 @@ This document explains the changes made to Iris for this release 🐛 Bugs Fixed ============= +#. `@acchamber`_ fixed a bug with :func:`~iris.util.unify_time_units` so it does not block + concatenation through different data types in rare instances. (:pull:`5372`) + #. 
`@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py index daf71890b1..8bee046dad 100644 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ b/lib/iris/tests/unit/util/test_unify_time_units.py @@ -112,6 +112,36 @@ def test_multiple_calendars(self): unify_time_units(cubelist) self._common(expected, cubelist) + def test_units_dtype_ints(self): + cube0, cube1 = self.simple_1d_time_cubes() + cube0.coord("time").points = np.array([1, 2, 3, 4, 5], dtype=int) + cube1.coord("time").points = np.array([1, 2, 3, 4, 5], dtype=int) + cubelist = iris.cube.CubeList([cube0, cube1]) + unify_time_units(cubelist) + assert len(cubelist.concatenate()) == 1 + + def test_units_bounded_dtype_ints(self): + cube0, cube1 = self.simple_1d_time_cubes() + cube0.coord("time").bounds = np.array( + [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]], dtype=int + ) + cube1.coord("time").bounds = np.array( + [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]], dtype=np.float64 + ) + cubelist = iris.cube.CubeList([cube0, cube1]) + unify_time_units(cubelist) + assert len(cubelist.concatenate()) == 1 + + def test_units_dtype_int_float(self): + cube0, cube1 = self.simple_1d_time_cubes() + cube0.coord("time").points = np.array([1, 2, 3, 4, 5], dtype=int) + cube1.coord("time").points = np.array( + [1, 2, 3, 4, 5], dtype=np.float64 + ) + cubelist = iris.cube.CubeList([cube0, cube1]) + unify_time_units(cubelist) + assert len(cubelist.concatenate()) == 1 + if __name__ == "__main__": tests.main() diff --git a/lib/iris/util.py b/lib/iris/util.py index 0b31ebdafc..c040b72b54 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -1471,7 +1471,8 @@ def unify_time_units(cubes): Performs an in-place conversion of the time units of all time coords in the cubes in a given iterable. 
One common epoch is defined for each calendar found in the cubes to prevent units being defined with inconsistencies - between epoch and calendar. + between epoch and calendar. During this process, all time coordinates have + their data type converted to 64-bit floats to allow for smooth concatenation. Each epoch is defined from the first suitable time coordinate found in the input cubes. @@ -1492,6 +1493,11 @@ def unify_time_units(cubes): for cube in cubes: for time_coord in cube.coords(): if time_coord.units.is_time_reference(): + time_coord.points = time_coord.core_points().astype("float64") + if time_coord.bounds is not None: + time_coord.bounds = time_coord.core_bounds().astype( + "float64" + ) epoch = epochs.setdefault( time_coord.units.calendar, time_coord.units.origin ) From 34aed746ac57cdb474c41c859164fa9d774d9ecb Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 7 Aug 2023 15:50:34 +0100 Subject: [PATCH 026/134] Minor docs navbar improvements. (#5415) --- docs/src/conf.py | 4 +++- docs/src/index.rst | 13 +++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index d7c04f2130..7f7322c1f8 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -294,7 +294,9 @@ def _dotv(version): "collapse_navigation": True, "navigation_depth": 3, "show_prev_next": True, - "navbar_align": "content", + "navbar_align": "left", + # TODO: review if 6 links is too crowded. + "header_links_before_dropdown": 6, "github_url": "https://github.com/SciTools/iris", "twitter_url": "https://twitter.com/scitools_iris", # icons available: https://fontawesome.com/v5.15/icons?d=gallery&m=free diff --git a/docs/src/index.rst b/docs/src/index.rst index 21971c2322..b353406f58 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -187,19 +187,20 @@ The legacy support resources: .. 
toctree:: - :caption: Iris API + :caption: What's New in Iris :maxdepth: 1 + :name: whats_new_index :hidden: - Iris API + whatsnew/index .. toctree:: - :caption: What's New in Iris + :caption: Iris API :maxdepth: 1 - :name: whats_new_index :hidden: - whatsnew/index + Iris API + -.. todolist:: \ No newline at end of file +.. todolist:: From 343977ad294dd521230fbac54ca95f36f7862316 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Wed, 9 Aug 2023 13:49:52 +0100 Subject: [PATCH 027/134] Avoid cftime warnings on pp-load (#5357) * Avoid cftime warnings on pp-load * whatsnew --- docs/src/whatsnew/latest.rst | 3 +++ lib/iris/fileformats/pp.py | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 9a9a18ccaa..6eade29273 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -50,6 +50,9 @@ This document explains the changes made to Iris for this release #. `@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) + +#. `@rcomer`_ modified pp-loading to avoid a ``cftime`` warning for non-standard + calendars. 
(:pull:`5357`) 💣 Incompatible Changes diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index ad0c6272ad..65e0e16d72 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1486,7 +1486,7 @@ def t1(self): """ if not hasattr(self, "_t1"): - has_year_zero = self.lbyr == 0 + has_year_zero = self.lbyr == 0 or None calendar = ( None if self.lbmon == 0 or self.lbdat == 0 else self.calendar ) @@ -1520,7 +1520,7 @@ def t2(self): """ if not hasattr(self, "_t2"): - has_year_zero = self.lbyrd == 0 + has_year_zero = self.lbyrd == 0 or None calendar = ( None if self.lbmond == 0 or self.lbdatd == 0 else self.calendar ) @@ -1567,7 +1567,7 @@ def t1(self): """ if not hasattr(self, "_t1"): - has_year_zero = self.lbyr == 0 + has_year_zero = self.lbyr == 0 or None calendar = ( None if self.lbmon == 0 or self.lbdat == 0 else self.calendar ) @@ -1602,7 +1602,7 @@ def t2(self): """ if not hasattr(self, "_t2"): - has_year_zero = self.lbyrd == 0 + has_year_zero = self.lbyrd == 0 or None calendar = ( None if self.lbmond == 0 or self.lbdatd == 0 else self.calendar ) From 3531f7b0a27d25d50fe20f17c53e06e5ebfc2dc0 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Thu, 10 Aug 2023 10:11:37 +0100 Subject: [PATCH 028/134] Updated environment lockfiles (#5366) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 172 +++++++++++++----------- requirements/locks/py311-linux-64.lock | 172 +++++++++++++----------- requirements/locks/py39-linux-64.lock | 178 +++++++++++++------------ 3 files changed, 282 insertions(+), 240 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index fa6050a524..9c5ea32d8e 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,9 +1,9 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 2b004b3b54cef3f1b8e174aef2273590c7e578f60de14562357ef83ec73063ce +# input_hash: 90bea26e2629b01270a880c650dfec7b34c38d9b6c6ddb4f8c9fee205d0e1ad6 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb @@ -21,6 +21,7 @@ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 @@ -33,7 +34,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz 
https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -47,13 +48,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h803 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -68,15 +69,19 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -84,27 +89,29 @@ https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.cond https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_0.conda#aa8b86066614c4573f6db62c91978fa9 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_0.conda#276339b0115d92c6e0793dcdc7afe308 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -117,25 +124,28 @@ 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.con https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py310hd8f1fbe_9.conda#e2047ad2af52c01845f58b580c6cbd5c +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py310hc6cd4ac_0.conda#115ffd79412d084f541f485b92c94fcf +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py310hc6cd4ac_0.conda#b903ef2ce154e97f621fe30d999227ad https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 -https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb +https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -143,25 +153,25 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_0.conda#b10174a063ec195f8fe1b278282c3149 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_0.conda#5597d9f9778af6883ae64f0e7d39416c https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.0-py310ha4c1d20_0.conda#03319f78e5c9c8d90c0110e2c6ed24f6 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py310ha4c1d20_0.conda#188e72aa313da668464e35309e9a32b0 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.0-pyhd8ed1ab_0.conda#d3ed087d1f7f8f5590e8e87b57a8ce64 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc 
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 @@ -173,27 +183,24 @@ https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py310h2372a71_0.conda#1c510e74c87dc9b8fe1f7f9e8dbcef96 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 
+https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc @@ -201,72 +208,79 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.cond https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py310hd41b1e2_0.conda#684399f9ddc0b9d6f3b6164f6107098e https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py310h2372a71_0.conda#13df1c4ea94f2e3326b15da1999e5999 -https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.tar.bz2#fd18cd597d23b2b5ddde23bd5b7aec32 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py310h2372a71_0.conda#d3d83b419c81ac718a9221442707882b -https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.7.0-pyha770c72_0.conda#ba3786c6846e46038fe60c785d46dc81 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py310h2372a71_0.conda#f939fe2998c888a77b310926a6c666f3 
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py310h582fbeb_1.conda#cf62f6cff3536eafaaa0c740b0bf7465 -https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py310h582fbeb_0.conda#adcc7ea52e4d39d0a93f6a2ef36c7fd4 +https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 
-https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.2-pyhd8ed1ab_1.conda#f2465696f4396245eca4613f6e924796 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h056c13c_1.conda#32d925cfd330e0cbb72b7618558a44e8 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py310hc6cd4ac_0.conda#a3217e1bff09702dfdfcb536825fc12d -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_0.conda#be1a7e420b7bac4ee02353d0e3161918 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.7.0-hd8ed1ab_0.conda#27a4cec373ec84d1c1aa02a1e37f8eaf -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py310he60537e_0.conda#68b2dd34c69d08b05a9db5e3596fe3ee -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py310h7cbd5c2_0.conda#e0b845c6b29a1ed2e409bef6c0f5d96b +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py310hf38f957_0.conda#9b55c9041c5a7f80f184a2cb05ec9663 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py310h7cbd5c2_1.conda#11e0099d4571b4974c04386e4ce679ed https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.6.0-pyhd8ed1ab_0.conda#741384b21c1b512617f4ee4ea8457c5d +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_4.conda#345beb10601d5360a15c033d68165a4f https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb 
-https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hde23a83_100.conda#d5de42b3b49fb20e01d1003085ef588f https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.1-pyhd8ed1ab_0.conda#838b85f656b078bdd882ef97978e7f40 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py310ha4c1d20_3.conda#0414d57832172f3cdcf56b5f053e177d +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py310ha4c1d20_0.conda#300d3b434872eb84965864f0fcc5b5da +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_4.conda#db878a0696f9a7980171fd3cf29cca22 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py310hff52083_0.conda#7e454b4a61754714a4a4d183641374da +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310h6f5dce6_101.conda#0d50bea104512f2728676a8bff8840d3 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_1.conda#c6b2e7903121c3210462a0866a561993 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310h7eb24ba_1.conda#e727db22a14344608c2caeccaa9e9d2b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310hab646b1_3.conda#d049da3204bf5ecb54a852b622f2d7d2 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py310hff52083_0.conda#c2b60c44d38d32779006a15c2581f0d1 
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 +https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index e0919fe2ef..42e5224fe1 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,9 +1,9 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 2f069ddfd9b505e06a2c4ed71dff8be24543629660bd8a39c1a41dde291bd352 +# input_hash: b73fe0fbcf5caf5854030c02a6233bae6e4061e9f4175a5d8810c6bb3d7701b2 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb @@ -21,6 +21,7 @@ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 @@ -33,7 +34,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz 
https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -47,13 +48,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h803 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -68,15 +69,19 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -84,27 +89,29 @@ https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.cond https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_0.conda#aa8b86066614c4573f6db62c91978fa9 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_0.conda#276339b0115d92c6e0793dcdc7afe308 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.4-hab00c5b_0_cpython.conda#1c628861a2a126b9fc9363ca1b7d014e https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -117,25 +124,28 @@ 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.con https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py311ha362b79_9.conda#ced5340f5dc6cff43a80deac8d0e398f +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py311hb755f60_0.conda#17f4738a1ca6155a63d2a0cbd3e4a8b1 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py311hb755f60_0.conda#257dfede48699e2e6372528d08399e5a https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 -https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb +https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -143,25 +153,25 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_0.conda#b10174a063ec195f8fe1b278282c3149 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.0-py311h64a7726_0.conda#4df60430eca64502eb01e02df92246bf +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py311h64a7726_0.conda#71fd6f1734a0fa64d8f852ae7156ec45 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.0-pyhd8ed1ab_0.conda#d3ed087d1f7f8f5590e8e87b57a8ce64 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc 
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 @@ -173,26 +183,23 @@ https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.conda#12b1c374ee90a1aa11ea921858394dc8 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 
+https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc @@ -200,72 +207,79 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.cond https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py311h9547e67_0.conda#daf3f23397ab2265d0cdfa339f3627ba https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py311h459d7ec_0.conda#3c2c65575c28b23afc5e4ff721a2fc9f -https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.tar.bz2#21523141b35484b1edafba962c6ea883 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py311h459d7ec_0.conda#b19f671a6b221f922cf871d71a71c0fa -https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.7.0-pyha770c72_0.conda#ba3786c6846e46038fe60c785d46dc81 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py311h459d7ec_0.conda#8c1ac2c00995248898220c4c1a9d81ab +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 
+https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py311h0b84326_1.conda#6be2190fdbf26a6c1d3356a54d955237 -https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py311h0b84326_0.conda#4b24acdc1fbbae9da03147e7d2cf8c8a +https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.2-pyhd8ed1ab_1.conda#f2465696f4396245eca4613f6e924796 
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py311hb755f60_0.conda#2b5430f2f1651f460c852e1fdd549184 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_0.conda#17d25ab64a32872b349579fdb07bbdb2 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.7.0-hd8ed1ab_0.conda#27a4cec373ec84d1c1aa02a1e37f8eaf -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py311h8597a09_0.conda#70c3b734ffe82c16b6d121aaa11929a8 
-https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py311h320fe9a_0.conda#509769b430266dc5c2f6a3eab0f23164 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py311h54ef318_0.conda#2631a9e423855fb586c05f8a5ee8b177 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py311h320fe9a_1.conda#5f92f46bd33917832a99d1660b4075ac https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.6.0-pyhd8ed1ab_0.conda#741384b21c1b512617f4ee4ea8457c5d +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py311hcafe171_3.conda#0d79df2a96f6572fed2883374400b235 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_4.conda#3cff4c98f775ff6439b95bb7917702e9 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 
+https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h4d7c953_100.conda#c03492d0342e512e58aa2d6c5fdaaa91 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.1-pyhd8ed1ab_0.conda#838b85f656b078bdd882ef97978e7f40 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 
+https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py311h64a7726_3.conda#a01a3a7428e770db5a0c8c7ab5fce7f7 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py311h64a7726_0.conda#356da36102fc1eeb8a81e6d79e53bc7e +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_4.conda#afe5363b88d2e97266063558a6599bd0 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py311h38be061_0.conda#c056ffab165096669389e5a4eea4dc4d +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h9a7c333_101.conda#1dc70c7c3352c0ff1f861d866860db37 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_1.conda#c6b2e7903121c3210462a0866a561993 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py311hd88b842_1.conda#f19feb9440890ccb806a367ea9ae0654 -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py311ha74522f_3.conda#ad6dd0bed0cdf5f2d4eb2b989d6253b3 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py311h38be061_0.conda#8fd462c8bcbba5a3affcb2d04e387476 
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 +https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index a7b390c21d..f30b7d0405 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,9 +1,9 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: b3cb1f7bc6b32267d57b62a9f2f18ea72ba40a12bb0f57668771079837395d34 +# input_hash: a96712105b515671c42bd403fde393d6f10f99a02267d05c771ab9ca88f64093 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb @@ -21,6 +21,7 @@ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 @@ -33,7 +34,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz 
https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -47,13 +48,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h803 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -68,15 +69,19 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.67-he9d0100_0.conda#d05556c80caffff164d17bdea0105a1a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -84,27 +89,29 @@ https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.cond https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.4-h0d562d8_0.conda#e46fad17d5fb57316b956f88dca765e4 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_0.conda#aa8b86066614c4573f6db62c91978fa9 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 
+https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_0.conda#753e078cccad40fe4b396bdcf27a3c15 +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_0.conda#276339b0115d92c6e0793dcdc7afe308 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc @@ -117,25 +124,28 @@ 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.con https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py39h5a03fae_9.conda#d1601752c6f47af7bedf838be3d8ca6b +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py39h3d6467e_0.conda#019c9509764e66c9d9d38b5ca365a9f4 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py39h3d6467e_0.conda#3d700ccea39ca04cb8b6210ac653e0b1 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py39hf3d152e_3.tar.bz2#4f0fa7459a1f40a969aaad418b1c428c -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 -https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb +https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b 
-https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.3-hfc55251_0.conda#8951eedf3cdf94dd733c1b5eee1f4880 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -143,25 +153,25 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_0.conda#b10174a063ec195f8fe1b278282c3149 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc 
+https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_0.conda#9c858d105816f454c6b64f3e19184b60 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.0-py39h6183b62_0.conda#02b87fef8e4c72be8256435ed59fe8de +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py39h6183b62_0.conda#f1c358d06344bd7f9a293f9af4b9b8fc https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.0-pyhd8ed1ab_0.conda#d3ed087d1f7f8f5590e8e87b57a8ce64 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b 
+https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 @@ -173,100 +183,104 @@ https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py39hd1e30aa_0.conda#da334eecb1ea2248e28294c49e6f6d89 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 
https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py39h7633fee_0.conda#54e6f32e448fdc273606011f0940d076 -https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.tar.bz2#eb31327ace8dac15c2df243d9505a132 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.40.0-py39hd1e30aa_0.conda#5f7c468bf9d9551a80187db7e809ef1f -https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.3-hfc55251_0.conda#950e02f5665f5f4ff0437a6acba58798 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.7.0-pyha770c72_0.conda#ba3786c6846e46038fe60c785d46dc81 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.12.0-pyhd8ed1ab_0.conda#e5fd2260a231ee63b6969f4801082f2b +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_0.conda#434246edfc30e20c0847d4c2caff0a53 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py39hd1e30aa_0.conda#03e44d84ea9dd2432a633407401e5688 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.0.1-pyhd8ed1ab_0.conda#d978c61aa5fc2c69380d53ad56b5ae86 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_0.conda#e976871e132fe506da52c1240229246a +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py39haaeba84_1.conda#d7aa9b99ed6ade75fbab1e4cedcb3ce2 -https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py39haaeba84_0.conda#f97a95fab7c69678ebf6b57396b1323e +https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 
https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.2-pyhd8ed1ab_1.conda#f2465696f4396245eca4613f6e924796 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hf1c3bca_1.conda#ae6bfe65e81d9b59a71cc01a2858650f -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.9-py39h3d6467e_0.conda#6d990f672cc70e5c480ddb74b789a17c -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_0.conda#4eaef850715aff114e2126a2f1a7b1f0 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df 
-https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.12.0-pyhd8ed1ab_0.conda#3544c818f0720c89eb16ae6940ab440b -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.7.0-hd8ed1ab_0.conda#27a4cec373ec84d1c1aa02a1e37f8eaf -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py39h40cae4c_0.conda#de99b3f807c0b295a7df94623df0fb4c +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.0.1-pyhd8ed1ab_0.conda#54661981fd331e20847d8a49543dd9af +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py39h40cae4c_1.conda#cfe677f02e507f76d6767379e4ff09a9 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.6.0-pyhd8ed1ab_0.conda#741384b21c1b512617f4ee4ea8457c5d +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_4.conda#b83a218fa97e9963c858d0db651a7506 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py39he190548_0.conda#f2a931db797bb58bd335f4a857b4c898 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4e81c44_100.conda#360163b65cfd5e43ac60de5c6c3a2696 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py39h0126182_0.conda#61cee808ff7830fcceeb4f336cc738b1 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.1-pyhd8ed1ab_0.conda#838b85f656b078bdd882ef97978e7f40 
-https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.1-h98fae49_0.conda#4b827ee65a747c4a24f2a6ac7f3ff093 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py39h6183b62_3.conda#84c4007675da392fdb99faeefda69552 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py39h6183b62_0.conda#81212684c03e970520656f1a62ab9d39 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb 
+https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_4.conda#e12391692d70732bf1df08b7ecf40095 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py39hf3d152e_0.conda#6ce223b8b14df8bdfa72ac2a10c2fad3 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4218a78_101.conda#8f5c25bb7accd1954d8b7fc689c5975c +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_1.conda#c6b2e7903121c3210462a0866a561993 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h4bd5d67_1.conda#a60d65263a8ddbff5381ed91d4f6953e -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 
-https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h5c7b992_3.conda#19e30314fe824605750da905febb8ee6 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py39hf3d152e_0.conda#682772fa385911fb5efffbce21b269c5 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 +https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 From becc890709beca7f23f556ed11685d13cc7c8a51 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 10:55:56 +0100 Subject: [PATCH 029/134] Updated environment lockfiles (#5419) Co-authored-by: Lockfile bot --- requirements/locks/py39-linux-64.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index f30b7d0405..e598fba992 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -113,7 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.cond https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 
https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 -https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78 +https://conda.anaconda.org/conda-forge/linux-64/python-3.9.17-h0755675_0_cpython.conda#384886ac3580bba3541ce65c992eb192 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 From 915c1f40e4af34dac7f87881bce376f972d644a6 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Tue, 15 Aug 2023 13:57:40 +0100 Subject: [PATCH 030/134] Update Installation Guide (#5416) --- docs/src/installing.rst | 59 ++++++++++++++---------------------- docs/src/whatsnew/latest.rst | 3 ++ 2 files changed, 26 insertions(+), 36 deletions(-) diff --git a/docs/src/installing.rst b/docs/src/installing.rst index cff9a27952..a8207c37a5 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -3,30 +3,26 @@ Installing ========== -Iris is available using conda for the following platforms: - -* Linux 64-bit, -* Mac OSX 64-bit, and -* Windows 64-bit. - -Windows 10 now has support for Linux distributions via WSL_ (Windows -Subsystem for Linux). This is a great option to get started with Iris -for users and developers. Be aware that we do not currently test against -any WSL_ distributions. - -.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/install +Iris can be installed using conda or pip. .. note:: Iris is currently supported and tested against |python_support| running on Linux. 
We do not currently actively test on other platforms such as Windows or macOS. + Windows 10 now has support for Linux distributions via WSL_ (Windows + Subsystem for Linux). This is a great option to get started with + Iris for users and contributors. Be aware that we do not currently + test against any WSL_ distributions. + +.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/install + .. note:: This documentation was built using Python |python_version|. .. _installing_using_conda: -Installing Using Conda (Users) ------------------------------- +Installing a Released Version Using Conda +----------------------------------------- To install Iris using conda, you must first download and install conda, for example from https://docs.conda.io/en/latest/miniconda.html. @@ -44,33 +40,24 @@ need the Iris sample data. This can also be installed using conda:: Further documentation on using conda and the features it provides can be found at https://docs.conda.io/projects/conda/en/latest/index.html. -.. _installing_from_source_without_conda: - -Installing from Source Without Conda on Debian-Based Linux Distros (Developers) -------------------------------------------------------------------------------- +.. _installing_using_pip: -Iris can also be installed without a conda environment. The instructions in -this section are valid for Debian-based Linux distributions (Debian, Ubuntu, -Kubuntu, etc.). +Installing a Released Version Using Pip +--------------------------------------- -Iris and its dependencies need some shared libraries in order to work properly. -These can be installed with apt:: +Iris is also available from https://pypi.org/ so can be installed with ``pip``:: - sudo apt-get install python3-pip python3-tk libudunits2-dev libproj-dev proj-bin libgeos-dev libcunit1-dev + pip install scitools-iris -The rest can be done with pip:: - - pip3 install scitools-iris +If you wish to run any of the code in the gallery you will also +need the Iris sample data. 
This can also be installed using pip:: -This procedure was tested on a Ubuntu 20.04 system on the -26th of July, 2021. -Be aware that through updates of the involved Debian packages, -dependency conflicts might arise or the procedure might have to be modified. + pip install iris-sample-data .. _installing_from_source: -Installing from Source with Conda (Developers) ----------------------------------------------- +Installing a Development Version from a Git Checkout +---------------------------------------------------- The latest Iris source release is available from https://github.com/SciTools/iris. @@ -78,9 +65,9 @@ https://github.com/SciTools/iris. For instructions on how to obtain the Iris project source from GitHub see :ref:`forking` and :ref:`set-up-fork` for instructions. -Once conda is installed, you can install Iris using conda and then activate -it. The example commands below assume you are in the root directory of your -local copy of Iris:: +Once conda is installed, you can create a development environment for Iris +using conda and then activate it. The example commands below assume you are in +the root directory of your local copy of Iris:: conda env create --force --file=requirements/iris.yml conda activate iris-dev diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 6eade29273..ed3b8092ba 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -94,6 +94,9 @@ This document explains the changes made to Iris for this release and :meth:`~iris.coords.Coord.convert_units` by including a link to the UDUNITS-2 documentation which contains lists of compatible units and aliases for them. +#. `@rcomer`_ updated the :ref:`Installation Guide` to reflect + that some things are now simpler. 
(:pull:`5416`) + 💼 Internal =========== From 8361532d141a16ec44ecf65e85162c583c7c6d06 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 16 Aug 2023 13:33:56 +0100 Subject: [PATCH 031/134] Update standard-names to #82 + add table version in std_names.py (#5423) --- docs/src/whatsnew/latest.rst | 6 + etc/cf-standard-name-table.xml | 2463 ++++++++++++++++-------------- lib/iris/tests/test_std_names.py | 10 +- tools/generate_std_names.py | 9 +- 4 files changed, 1334 insertions(+), 1154 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ed3b8092ba..b500029789 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -41,6 +41,12 @@ This document explains the changes made to Iris for this release the data variable's ``coordinates`` attribute when saving to NetCDF. (:issue:`5206`, :pull:`5389`) +#. `@pp-mo`_ modified the install process to record the release version of the CF + standard-names table, when it creates the ``iris/std_names.py`` module. + The release number is also now available as + ``iris.std_names.CF_STANDARD_NAMES_TABLE_VERSION``. + (:pull:`5423`) + 🐛 Bugs Fixed ============= diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index 3b145ae86e..6e3c014849 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,7 +1,7 @@ - 81 - 2023-04-25T10:43:33Z + 82 + 2023-07-06T13:17:07Z Centre for Environmental Data Analysis support@ceda.ac.uk @@ -269,14 +269,14 @@ m - The altitude at top of atmosphere boundary layer is the elevation above sea level of the top of the (atmosphere) planetary boundary layer. The phrase "defined_by" provides the information of the tracer used for identifying the atmospheric boundary layer top. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. 
"Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "By ranging instrument" means that the backscattering is obtained through ranging techniques like lidar and radar. + The altitude at top of atmosphere boundary layer is the elevation above sea level of the top of the (atmosphere) planetary boundary layer. The phrase "defined_by" provides the information of the tracer used for identifying the atmospheric boundary layer top. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "By ranging instrument" means that the backscattering is obtained through ranging techniques like lidar and radar. m - The altitude at top of atmosphere mixed layer is the elevation above sea level of the top of the (atmosphere) mixed layer or convective boundary layer. The phrase "defined_by" provides the information of the tracer used for identifying the atmospheric boundary layer top. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "By ranging instrument" means that the volume backscattering coefficient is obtained through ranging techniques like lidar and radar. 
+ The altitude at top of atmosphere mixed layer is the elevation above sea level of the top of the (atmosphere) mixed layer or convective boundary layer. The phrase "defined_by" provides the information of the tracer used for identifying the atmospheric boundary layer top. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "By ranging instrument" means that the volume backscattering coefficient is obtained through ranging techniques like lidar and radar. @@ -469,21 +469,21 @@ - J kg -1 + J kg-1 Convective(ly) available potential energy (often abbreviated CAPE) is a stability measure calculated by integrating the positive temperature difference between the surrounding atmosphere and a parcel of air lifted adiabatically from the surface to its equilibrium level. CAPE exists under conditions of potential instability, and measures the potential energy per unit mass that would be released by the unstable parcel if it were able to convect upwards to equilibrium. - J kg -1 + J kg-1 Convective inhibition is the amount of energy per unit mass required to overcome the negatively buoyant energy exerted by the environment on a parcel of air. Convective inhibition is often abbreviated as "CIN" or "CINH". It is calculated by integrating the negative temperature difference between the surrounding atmosphere and a parcel of air lifted adiabatically from a given starting height to its equilibrium level. A coordinate variable of original_air_pressure_of_lifted_parcel should be specified to indicate the starting height of the lifted parcel. 
- J kg -1 + J kg-1 Convective inhibition is the amount of energy per unit mass required to overcome the negatively buoyant energy exerted by the environment on a parcel of air. Convective inhibition is often abbreviated as "CIN" or "CINH". It is calculated by integrating the negative temperature difference between the surrounding atmosphere and a parcel of air lifted adiabatically from the surface to its equilibrium level. @@ -3170,6 +3170,20 @@ The phrase "ratio_of_X_to_Y" means X/Y. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Also known as specific gravity, where soil represents a dry soil sample. The density of a substance is its mass per unit volume. + + degree_north + + + The latitude of deployment of a station or instrument. The term can be used whenever the deployment position of a station or instrument needs to be supplied along with other types of positions. If a data variable has only one latitude coordinate variable, the standard name of latitude should generally be preferred to deployment_latitude, because latitude is recognised by generic software. If the deployment latitude is also the nominal latitude for a discrete geometry (as in Section 9.5 of the CF convention), the deployment latitude should also, or instead, be recorded in a coordinate variable with the standard name of latitude and axis="Y". Latitude is positive northward; its units of "degree_north" (or equivalent) indicate this explicitly. + + + + degree_east + + + The longitude of deployment of a station or instrument. The term can be used whenever the deployment position of a station or instrument needs to be supplied along with other types of positions. If a data variable has only one longitude coordinate variable, the standard name of longitude should generally be preferred to deployment_longitude, because longitude is recognised by generic software. 
If the deployment longitude is also the nominal longitude for a discrete geometry (as in Section 9.5 of the CF convention), the deployment longitude should also, or instead, be recorded in a coordinate variable with the standard name of longitude and axis="X". Longitude is positive eastward; its units of "degree_east" (or equivalent) indicate this explicitly. + + m @@ -3720,7 +3734,7 @@ kg m-2 - The quantity with standard name drainage_amount_through_base_of_soil_model is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. “Drainage” is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. A vertical coordinate variable or scalar coordinate with standard name "depth" should be used to specify the depth to which the soil column extends. + The quantity with standard name drainage_amount_through_base_of_soil_model is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. "Drainage" is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. A vertical coordinate variable or scalar coordinate with standard name "depth" should be used to specify the depth to which the soil column extends. @@ -4437,6 +4451,13 @@ Longitude is positive eastward; its units of degree_east (or equivalent) indicate this explicitly. In a latitude-longitude system defined with respect to a rotated North Pole, the standard name of grid_longitude should be used instead of longitude. Grid longitude is positive in the grid-eastward direction, but its units should be plain degree. + + mol m-3 s-1 + + + "Gross mole production" means the rate of creation of biomass per unit volume with no correction for respiration loss in terms of quantity of matter (moles). The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. 
It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Prokaryotes" means all Bacteria and Archaea excluding photosynthetic cyanobacteria such as Synechococcus and Prochlorococcus or other separately named components of the prokaryotic population. + + kg m-2 s-1 @@ -4458,6 +4479,13 @@ "Production of carbon" means the production of biomass expressed as the mass of carbon which it contains. Gross primary production is the rate of synthesis of biomass from inorganic precursors by autotrophs ("producers"), for example, photosynthesis in plants or phytoplankton. The producers also respire some of this biomass and the difference is "net_primary_production". "Productivity" means production per unit area. The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + + kg m-3 s-1 + + + "Gross production" means the rate of creation of biomass per unit volume with no correction for respiration. The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Prokaryotes" means all Bacteria and Archaea excluding photosynthetic cyanobacteria such as Synechococcus and Prochlorococcus or other separately named components of the prokaryotic population. + + 1 @@ -7854,7 +7882,7 @@ - J Kg-1 + J kg-1 The lightning_potential_index measures the potential for charge generation and separation that leads to lightning flashes in convective thunderstorms. 
It is derived from the model simulated grid-scale updraft velocity and the mass mixing-ratios of liquid water, cloud ice, snow, and graupel. @@ -8539,6 +8567,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for chlorine nitrate is ClONO2. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Chlorophyll-a is the most commonly occurring form of natural chlorophyll. The chemical formula of chlorophyll-a is C55H72O5N4Mg. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + + kg m-3 @@ -9260,6 +9295,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
Petroleum hydrocarbons are compounds containing just carbon and hydrogen originating from the fossil fuel crude oil. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Phaeopigments are non-photosynthetic pigments that are the degradation product of algal chlorophyll pigments. They are commonly formed during and after marine phytoplankton blooms. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + + kg m-3 @@ -9351,6 +9393,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol takes up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the aerosol. "Dry aerosol particles" means aerosol particles without any water uptake. "Primary particulate organic matter " means all organic matter emitted directly to the atmosphere as particles except elemental carbon. The sum of primary_particulate_organic_matter_dry_aerosol and secondary_particulate_organic_matter_dry_aerosol is particulate_organic_matter_dry_aerosol. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Prokaryotes" means all Bacteria and Archaea excluding photosynthetic cyanobacteria such as Synechococcus and Prochlorococcus or other separately named components of the prokaryotic population. + + kg m-3 @@ -11669,7 +11718,7 @@ - mol/m3 + mol m-3 "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved organic nitrogen" describes the nitrogen held in carbon compounds in solution. These are mostly generated by plankton excretion and decay. @@ -12123,6 +12172,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ozone is O3. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + mol m-3 @@ -12130,6 +12186,41 @@ "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. Phosphorus means phosphorus in all chemical forms, commonly referred to as "total phosphorus". 
+ + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + mol m-3 @@ -12137,6 +12228,27 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
Particulate means suspended solids of all sizes. + + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + mol m-3 @@ -12235,6 +12347,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. 
It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Picophytoplankton are phytoplankton of less than 2 micrometers in size. Phytoplankton are algae that grow where there is sufficient light to support photosynthesis. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Prokaryotes" means all Bacteria and Archaea excluding photosynthetic cyanobacteria such as Synechococcus and Prochlorococcus or other separately named components of the prokaryotic population. + + mol m-3 @@ -13877,7 +13996,7 @@ W - "Northward" indicates a vector component which is positive when directed northward (negative southward). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. 
DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. + "Northward" indicates a vector component which is positive when directed northward (negative southward). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. @@ -14409,7 +14528,7 @@ kg s-1 - The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. 
There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. + The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. @@ -14759,7 +14878,7 @@ kg s-1 - "y" indicates a vector component along the grid y-axis, positive with increasing y. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. 
There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. + "y" indicates a vector component along the grid y-axis, positive with increasing y. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. @@ -14920,7 +15039,7 @@ m s-1 - "Heave rate" is the rate of displacement along the local vertical axis. Heave rate might not include changes to the “at rest” position of the platform with respect to the axis of displacement, which may change over time. The standard name platform_heave_rate should be chosen only if the sign convention of the data is unknown. For cases where the sign convention of the heave rate is known, a standard name of platform_heave_rate_down or platform_heave_rate_up should be chosen, as appropriate. A "platform" is a structure or vehicle that serves as a base for mounting sensors. Platforms include, but are not limited to, satellites, aeroplanes, ships, buoys, instruments, ground stations, and masts. + "Heave rate" is the rate of displacement along the local vertical axis. 
Heave rate might not include changes to the "at rest" position of the platform with respect to the axis of displacement, which may change over time. The standard name platform_heave_rate should be chosen only if the sign convention of the data is unknown. For cases where the sign convention of the heave rate is known, a standard name of platform_heave_rate_down or platform_heave_rate_up should be chosen, as appropriate. A "platform" is a structure or vehicle that serves as a base for mounting sensors. Platforms include, but are not limited to, satellites, aeroplanes, ships, buoys, instruments, ground stations, and masts. @@ -18154,7 +18273,7 @@ sr - The ratio of volume extinction coefficient to volume backwards scattering coefficient by ranging instrument in air due to ambient aerosol particles (often called "lidar ratio") is the ratio of the "volume extinction coefficient" and the "volume backwards scattering coefficient of radiative flux by ranging instrument in air due to ambient aerosol particles". The ratio is assumed to be related to the same wavelength as the incident radiation. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
+ The ratio of volume extinction coefficient to volume backwards scattering coefficient by ranging instrument in air due to ambient aerosol particles (often called "lidar ratio") is the ratio of the "volume extinction coefficient" and the "volume backwards scattering coefficient of radiative flux by ranging instrument in air due to ambient aerosol particles". The ratio is assumed to be related to the same wavelength of incident radiation. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. @@ -18388,6 +18507,13 @@ The sea_floor_depth_below_sea_surface is the vertical distance between the sea surface and the seabed as measured at a given point in space including the variance caused by tides and possibly waves. + + s + + + "Sea floor sediment" is sediment deposited at the sea bed. "Sediment age" means the length of time elapsed since the sediment was deposited. The phrase "before_1950" is a transparent representation of the phrase "before_present", often used in the geological and archaeological domains to refer to time elapsed between an event and 1950 AD. + + m @@ -19704,6 +19830,13 @@ Speed is the magnitude of velocity. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Due to tides" means due to all astronomical gravity changes which manifest as tides. 
No distinction is made between different tidal components. + + s-1 + + + Speed is the magnitude of velocity. Sea water speed shear is the derivative of sea water speed with respect to depth. + + K 80 @@ -20355,6 +20488,13 @@ "specific" means per unit mass. Dry energy is the sum of dry static energy and kinetic energy. Dry static energy is the sum of enthalpy and potential energy (itself the sum of gravitational and centripetal potential energy). Enthalpy can be written either as (1) CpT, where Cp is heat capacity at constant pressure, T is absolute temperature, or (2) U+pV, where U is internal energy, p is pressure and V is volume. + + J kg-1 + + + The specific_enthalpy_of_air is the enthalpy of air per unit mass, which can be computed for an air sample as the sum of the enthalpy of the dry air and the enthalpy of the water vapor in that air, divided by the mass of dry air. + + J kg-1 @@ -20404,6 +20544,13 @@ "Specific" means per unit mass. "Turbulent kinetic energy" is the kinetic energy of chaotic fluctuations of the fluid flow. The dissipation of kinetic energy arises in ocean models as a result of the viscosity of sea water. + + m2 s-2 + + + Specific means per unit mass. "Turbulent kinetic energy" is the kinetic energy of all eddy-induced motion that is not resolved on the grid scale of the model. + + m2 s-2 @@ -24594,8 +24741,7 @@ kg m-2 s-1 - Methane emitted from the surface, generated by biomass burning (fires). Positive direction upwards. -The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for methane is CH4. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. the surface of the earth). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The term "fires" means all biomass fires, whether naturally occurring or ignited by humans. The precise conditions under which fires produce and consume methane can vary between models. + Methane emitted from the surface, generated by biomass burning (fires). Positive direction upwards. The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The chemical formula for methane is CH4. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. the surface of the earth). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The term "fires" means all biomass fires, whether naturally occurring or ignited by humans. The precise conditions under which fires produce and consume methane can vary between models. @@ -30335,7 +30481,7 @@ The surface called "surface" means the lower boundary of the atmospher W m-2 - The phrase "tendency_of_X" means derivative of X with respect to time. 
The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the conservative temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Conservative Temperature is defined as part of the Thermodynamic Equation of Seawater 2010 (TEOS-10) which was adopted in 2010 by the International Oceanographic Commission (IOC). Conservative Temperature is specific potential enthalpy (which has the standard name sea_water_specific_potential_enthalpy) divided by a fixed value of the specific heat capacity of sea water, namely cp_0 = 3991.86795711963 J kg-1 K-1. Conservative Temperature is a more accurate measure of the "heat content" of sea water, by a factor of one hundred, than is potential temperature. Because of this, it can be regarded as being proportional to the heat content of sea water per unit mass. Reference: www.teos-10.org; McDougall, 2003 doi: 10.1175/1520-0485(2003)033<0945:PEACOV>2.0.CO;2. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. 
McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. Additionally, when the parameterized advective process is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic. The convergence of a skew-flux is identical (in the continuous formulation) to the convergence of an advective flux, making their tendencies the same. + The phrase "tendency_of_X" means derivative of X with respect to time. The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the conservative temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Conservative Temperature is defined as part of the Thermodynamic Equation of Seawater 2010 (TEOS-10) which was adopted in 2010 by the International Oceanographic Commission (IOC). Conservative Temperature is specific potential enthalpy (which has the standard name sea_water_specific_potential_enthalpy) divided by a fixed value of the specific heat capacity of sea water, namely cp_0 = 3991.86795711963 J kg-1 K-1. Conservative Temperature is a more accurate measure of the "heat content" of sea water, by a factor of one hundred, than is potential temperature. 
Because of this, it can be regarded as being proportional to the heat content of sea water per unit mass. Reference: www.teos-10.org; McDougall, 2003 doi: 10.1175/1520-0485(2003)033<0945:PEACOV>2.0.CO;2. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. Additionally, when the parameterized advective process is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic. The convergence of a skew-flux is identical (in the continuous formulation) to the convergence of an advective flux, making their tendencies the same. @@ -30384,7 +30530,7 @@ The surface called "surface" means the lower boundary of the atmospher W m-2 - The phrase "tendency_of_X" means derivative of X with respect to time. The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the potential temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. 
If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Potential temperature is the temperature a parcel of air or sea water would have if moved adiabatically to sea level pressure. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. Additionally, when the parameterized advective process is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic. The convergence of a skew-flux is identical (in the continuous formulation) to the convergence of an advective flux, making their tendencies the same. + The phrase "tendency_of_X" means derivative of X with respect to time. The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the potential temperature of the sea water in the grid cell and integrated over depth. 
If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Potential temperature is the temperature a parcel of air or sea water would have if moved adiabatically to sea level pressure. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. Additionally, when the parameterized advective process is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic. The convergence of a skew-flux is identical (in the continuous formulation) to the convergence of an advective flux, making their tendencies the same. @@ -30475,7 +30621,7 @@ The surface called "surface" means the lower boundary of the atmospher kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. Additionally, when the parameterized advective process is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic. The convergence of a skew-flux is identical (in the continuous formulation) to the convergence of an advective flux, making their tendencies the same. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized submesoscale eddy advection occurs on a spatial scale of the order of 1 km horizontally. Reference: James C. 
McWilliams 2016, Submesoscale currents in the ocean, Proceedings of the Royal Society A: Mathematical, Physical and Engineering Sciences, volume 472, issue 2189. DOI: 10.1098/rspa.2016.0117. There are also standard names for parameterized_mesoscale_eddy_advection which, along with parameterized_submesoscale_eddy_advection, contributes to the total parameterized eddy advection. Additionally, when the parameterized advective process is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic. The convergence of a skew-flux is identical (in the continuous formulation) to the convergence of an advective flux, making their tendencies the same. @@ -31388,7 +31534,7 @@ The surface called "surface" means the lower boundary of the atmospher The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. 
"Overcast" means a fractional sky cover of 95% or more when at least a portion of this amount is attributable to clouds or obscuring phenomena (such as haze, dust, smoke, fog, etc.) aloft. (Reference: AMS Glossary: http://glossary.ametsoc.org/wiki/Main_Page). Standard names are also defined for the quantities ultraviolet_index and ultraviolet_index_assuming_clear_sky. - + degree_C @@ -31724,13 +31870,6 @@ The surface called "surface" means the lower boundary of the atmospher The vertical_component_of_ocean_xy_tracer_diffusivity means the vertical component of the diffusivity of tracers in the ocean due to lateral mixing. This quantity could appear in formulations of lateral diffusivity in which "lateral" does not mean "iso-level", e.g. it would not be used for isopycnal diffusivity. "Tracer diffusivity" means the diffusivity of heat and salinity due to motion which is not resolved on the grid scale of the model. - - kg m-2 - - - “Drainage” is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. The vertical drainage amount in soil is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. - - m @@ -32298,6 +32437,20 @@ The surface called "surface" means the lower boundary of the atmospher Speed is the magnitude of velocity. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A gust is a sudden brief period of high wind speed. In an observed timeseries of wind speed, the gust wind speed can be indicated by a cell_methods of maximum for the time-interval. In an atmospheric model which has a parametrised calculation of gustiness, the gust wind speed may be separately diagnosed from the wind speed. + + m s-1 + + + Speed is the magnitude of velocity. 
Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A gust is a sudden brief period of high wind speed. In an observed timeseries of wind speed, the gust wind speed can be indicated by a cell_methods of maximum for the time-interval. In an atmospheric model which has a parametrised calculation of gustiness, the gust wind speed may be separately diagnosed from the wind speed. The specification of a physical process by the phrase "due_to" process means that the quantity named is a single term in a list of terms, the maximum of which composes the general quantity named by omitting the phrase. + + + + m s-1 + + + Speed is the magnitude of velocity. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) The wind speed is the magnitude of the wind velocity. A gust is a sudden brief period of high wind speed. In an observed timeseries of wind speed, the gust wind speed can be indicated by a cell_methods of maximum for the time-interval. In an atmospheric model which has a parametrised calculation of gustiness, the gust wind speed may be separately diagnosed from the wind speed. The specification of a physical process by the phrase "due_to" process means that the quantity named is a single term in a list of terms, the maximum of which composes the general quantity named by omitting the phrase. 
+ + s-1 N136 @@ -32390,2248 +32543,2256 @@ The surface called "surface" means the lower boundary of the atmospher - - moles_of_particulate_inorganic_carbon_per_unit_mass_in_sea_water + + isotropic_longwave_radiance_in_air - - temperature_in_ground + + isotropic_shortwave_radiance_in_air - - biological_taxon_lsid + + water_evapotranspiration_flux - - tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition + + drainage_amount_through_base_of_soil_model - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + mole_fraction_of_ozone_in_air - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + product_of_northward_wind_and_specific_humidity - - electrical_mobility_diameter_of_ambient_aerosol_particles + + radiation_wavelength - - diameter_of_ambient_aerosol_particles + + specific_gravitational_potential_energy - - mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air + + surface_drag_coefficient_for_heat_in_air - - effective_radius_of_stratiform_cloud_rain_particles + + surface_drag_coefficient_for_momentum_in_air - - effective_radius_of_stratiform_cloud_ice_particles + + surface_drag_coefficient_in_air - - effective_radius_of_stratiform_cloud_graupel_particles + + water_flux_into_sea_water - - effective_radius_of_convective_cloud_snow_particles + + wind_mixing_energy_flux_into_sea_water - - effective_radius_of_convective_cloud_rain_particles + + mole_fraction_of_chlorine_dioxide_in_air - - effective_radius_of_convective_cloud_ice_particles + + mole_fraction_of_chlorine_monoxide_in_air - - histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid + + mole_fraction_of_hypochlorous_acid_in_air - - backscattering_ratio_in_air + + surface_net_downward_radiative_flux - - soot_content_of_surface_snow + + surface_temperature - - liquid_water_content_of_surface_snow + + surface_temperature - - surface_snow_thickness + + surface_temperature - - thermal_energy_content_of_surface_snow + + 
surface_upward_sensible_heat_flux - - temperature_in_surface_snow + + mass_concentration_of_suspended_matter_in_sea_water - - integral_wrt_time_of_surface_downward_eastward_stress + + universal_thermal_comfort_index - - integral_wrt_time_of_surface_downward_northward_stress + + sea_surface_swell_wave_period - - sea_water_velocity_from_direction + + sea_surface_wind_wave_period - - sea_water_velocity_to_direction + + atmosphere_net_upward_convective_mass_flux - - sea_water_velocity_to_direction + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - integral_wrt_depth_of_product_of_salinity_and_sea_water_density + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - integral_wrt_depth_of_product_of_conservative_temperature_and_sea_water_density + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - integral_wrt_depth_of_product_of_potential_temperature_and_sea_water_density + + wave_frequency - - volume_fraction_of_condensed_water_in_soil_at_wilting_point + + northward_eliassen_palm_flux_in_air - - volume_fraction_of_condensed_water_in_soil_at_field_capacity + + northward_heat_flux_in_air_due_to_eddy_advection - - volume_fraction_of_condensed_water_in_soil_at_critical_point + + upward_eliassen_palm_flux_in_air - - volume_fraction_of_condensed_water_in_soil + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves - - product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity + + upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves - - product_of_lagrangian_tendency_of_air_pressure_and_geopotential_height + + lwe_thickness_of_atmosphere_mass_content_of_water_vapor - - product_of_lagrangian_tendency_of_air_pressure_and_air_temperature + + 
mass_content_of_cloud_condensed_water_in_atmosphere_layer - - product_of_lagrangian_tendency_of_air_pressure_and_air_temperature + + mass_content_of_cloud_ice_in_atmosphere_layer - - tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_dianeutral_mixing + + mass_content_of_water_in_atmosphere_layer - - tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + mass_content_of_water_vapor_in_atmosphere_layer - - tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + tendency_of_atmosphere_mass_content_of_water_due_to_advection - - rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + tendency_of_atmosphere_mass_content_of_water_vapor - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_convection - - mole_fraction_of_inorganic_bromine_in_air + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_deep_convection - - water_vapor_saturation_deficit_in_air + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence - - tendency_of_atmosphere_moles_of_carbon_tetrachloride + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - tendency_of_atmosphere_moles_of_carbon_monoxide + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - platform_yaw + + tendency_of_middle_atmosphere_moles_of_carbon_monoxide - - platform_pitch + + tendency_of_middle_atmosphere_moles_of_methane - - platform_roll + + tendency_of_middle_atmosphere_moles_of_methyl_bromide - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization + + 
tendency_of_middle_atmosphere_moles_of_methyl_chloride - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton + + tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton + + tendency_of_troposphere_moles_of_carbon_monoxide - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms + + tendency_of_troposphere_moles_of_methane - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + tendency_of_troposphere_moles_of_methyl_bromide - - mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + tendency_of_troposphere_moles_of_methyl_chloride - - tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + tendency_of_troposphere_moles_of_molecular_hydrogen - - tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes + + mass_fraction_of_convective_cloud_condensed_water_in_air - - tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction + + mass_fraction_of_ozone_in_air - - volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - mole_fraction_of_noy_expressed_as_nitrogen_in_air + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_moles_of_methane + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - tendency_of_specific_humidity_due_to_stratiform_precipitation + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - tendency_of_air_temperature_due_to_stratiform_precipitation + 
+ tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - stratiform_precipitation_flux + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - stratiform_precipitation_amount + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - lwe_thickness_of_stratiform_precipitation_amount + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - lwe_stratiform_precipitation_rate + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - water_evaporation_amount_from_canopy + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - water_evaporation_flux_from_canopy + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - precipitation_flux_onto_canopy + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - outgoing_water_volume_transport_along_river_channel + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission + + 
sea_surface_swell_wave_significant_height - - mass_fraction_of_mercury_dry_aerosol_particles_in_air + + sea_surface_wind_wave_significant_height - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice + + sea_surface_wave_significant_height - - surface_snow_density + + mass_content_of_water_in_soil_layer - - atmosphere_upward_relative_vorticity + + mass_content_of_water_in_soil - - atmosphere_upward_absolute_vorticity + + sea_surface_swell_wave_to_direction - - area_type + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - area_type + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - mass_fraction_of_liquid_precipitation_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - mass_fraction_of_liquid_precipitation_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - nitrogen_growth_limitation_of_diazotrophic_phytoplankton + + eastward_water_vapor_flux_in_air - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + kinetic_energy_dissipation_in_atmosphere_boundary_layer - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + lwe_stratiform_snowfall_rate - - mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water + + lwe_thickness_of_stratiform_snowfall_amount - - mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water + + 
northward_water_vapor_flux_in_air - - iron_growth_limitation_of_diazotrophic_phytoplankton + + stratiform_rainfall_amount - - growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance + + stratiform_rainfall_flux - - air_pseudo_equivalent_potential_temperature + + stratiform_rainfall_rate - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water + + stratiform_snowfall_amount - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water + + stratiform_snowfall_flux - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water + + thickness_of_stratiform_rainfall_amount - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - + + thickness_of_stratiform_snowfall_amount + - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition + + atmosphere_mass_content_of_cloud_condensed_water - - stratiform_cloud_area_fraction + + atmosphere_mass_content_of_cloud_ice - - surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water + + atmosphere_mass_content_of_convective_cloud_condensed_water - - surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water + + atmosphere_mass_content_of_water_vapor - - surface_upwelling_radiance_per_unit_wavelength_in_air + + surface_downward_mole_flux_of_carbon_dioxide - - surface_upwelling_longwave_flux_in_air + + surface_upward_mole_flux_of_carbon_dioxide - - incoming_water_volume_transport_along_river_channel + + atmosphere_mass_content_of_sulfate - - sea_water_potential_temperature_expressed_as_heat_content + + atmosphere_mass_content_of_sulfate - - sea_water_potential_temperature_expressed_as_heat_content + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - sea_ice_temperature_expressed_as_heat_content + + 
change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - sea_ice_temperature_expressed_as_heat_content + + atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles - - surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky + + mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - surface_downwelling_shortwave_flux_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_gravitational_settling - - surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_turbulent_deposition - - surface_downwelling_radiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water + + atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles - - surface_downwelling_longwave_flux_in_air + + 
angstrom_exponent_of_ambient_aerosol_in_air - - integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air + + atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles - - integral_wrt_time_of_surface_downwelling_longwave_flux_in_air + + atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles - - downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + atmosphere_mass_content_of_ammonium_dry_aerosol_particles - - downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + atmosphere_mass_content_of_dust_dry_aerosol_particles - - downwelling_radiative_flux_per_unit_wavelength_in_air + + atmosphere_mass_content_of_mercury_dry_aerosol_particles - - downwelling_radiance_per_unit_wavelength_in_sea_water + + atmosphere_mass_content_of_nitrate_dry_aerosol_particles - - downwelling_radiance_per_unit_wavelength_in_air + + atmosphere_mass_content_of_nitric_acid_trihydrate_ambient_aerosol_particles - - downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles - - downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - downwelling_photon_flux_per_unit_wavelength_in_sea_water + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - surface_upwelling_shortwave_flux_in_air_assuming_clear_sky + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles - - surface_upwelling_longwave_flux_in_air_assuming_clear_sky + + atmosphere_mass_content_of_water_in_ambient_aerosol_particles - - upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - 
upwelling_radiative_flux_per_unit_wavelength_in_sea_water + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles - - upwelling_radiative_flux_per_unit_wavelength_in_air + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles - - upwelling_radiance_per_unit_wavelength_in_air + + atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles - - surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles - - surface_upwelling_shortwave_flux_in_air + + atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water + + mass_concentration_of_dust_dry_aerosol_particles_in_air - - surface_upwelling_radiative_flux_per_unit_wavelength_in_air + + mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - surface_upwelling_radiance_per_unit_wavelength_in_sea_water + + mass_concentration_of_ammonium_dry_aerosol_particles_in_air - - platform_name + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - water_vapor_partial_pressure_in_air + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - effective_radius_of_stratiform_cloud_snow_particles + + mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_cfc11 + + mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air - - moles_of_cfc11_per_unit_mass_in_sea_water + + atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles - - atmosphere_moles_of_cfc11 + + mass_concentration_of_mercury_dry_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_cfc113 + + mass_concentration_of_nitrate_dry_aerosol_particles_in_air - - atmosphere_moles_of_cfc113 + + mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - 
tendency_of_atmosphere_moles_of_cfc114 + + mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_moles_of_cfc114 + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_cfc115 + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - atmosphere_moles_of_cfc115 + + mass_concentration_of_sulfate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_cfc12 + + mass_concentration_of_water_in_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_halon2402 + + mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - atmosphere_moles_of_halon2402 + + mass_fraction_of_dust_dry_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_hcc140a + + mass_fraction_of_nitrate_dry_aerosol_particles_in_air - - atmosphere_moles_of_hcc140a + + mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - tendency_of_troposphere_moles_of_hcc140a + + mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - tendency_of_middle_atmosphere_moles_of_hcc140a + + mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - tendency_of_troposphere_moles_of_hcfc22 + + mass_fraction_of_water_in_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_moles_of_hcfc22 + + mole_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - atmosphere_moles_of_hcfc22 + + mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - product_of_northward_wind_and_lagrangian_tendency_of_air_pressure + + number_concentration_of_ambient_aerosol_particles_in_air - - product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air - - 
floating_ice_shelf_area_fraction + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - atmosphere_moles_of_carbon_tetrachloride + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - mole_fraction_of_methylglyoxal_in_air + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition - - mole_fraction_of_dichlorine_peroxide_in_air + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling - - soil_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition - - slow_soil_pool_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition - - root_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition - - miscellaneous_living_matter_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition - - fast_soil_pool_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - medium_soil_pool_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - leaf_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - carbon_mass_content_of_forestry_and_agricultural_products + + 
tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - carbon_mass_content_of_forestry_and_agricultural_products + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil + + x_wind - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration + + y_wind - - eastward_transformed_eulerian_mean_air_velocity + + land_ice_surface_specific_mass_balance_rate - - surface_litter_mass_content_of_carbon + + land_ice_lwe_surface_specific_mass_balance_rate - - litter_mass_content_of_carbon + + isotropic_radiance_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition + + isotropic_radiance_per_unit_wavelength_in_air - - mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water + + omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water - - atmosphere_mass_content_of_convective_cloud_liquid_water + + mass_concentration_of_chlorophyll_in_sea_water - - effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top + + mass_concentration_of_chlorophyll_in_sea_water 
- - air_equivalent_temperature + + tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - air_pseudo_equivalent_temperature + + tendency_of_atmosphere_moles_of_methyl_bromide - - mass_content_of_cloud_liquid_water_in_atmosphere_layer + + tendency_of_atmosphere_moles_of_methyl_chloride - - air_equivalent_potential_temperature + + tendency_of_atmosphere_moles_of_molecular_hydrogen - - number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + tendency_of_atmosphere_moles_of_nitrous_oxide - - number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - effective_radius_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + atmosphere_moles_of_carbon_monoxide - - effective_radius_of_stratiform_cloud_liquid_water_particles + + sea_surface_wind_wave_to_direction - - effective_radius_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top + + sea_surface_wave_mean_period - - effective_radius_of_convective_cloud_liquid_water_particles + + equivalent_thickness_at_stp_of_atmosphere_ozone_content - - effective_radius_of_cloud_liquid_water_particles + + atmosphere_moles_of_methane - - atmosphere_mass_content_of_cloud_liquid_water + + atmosphere_moles_of_methyl_bromide - - atmosphere_moles_of_cfc12 + + atmosphere_moles_of_methyl_chloride - - tendency_of_atmosphere_moles_of_halon1202 + + atmosphere_moles_of_molecular_hydrogen - - atmosphere_moles_of_halon1202 + + atmosphere_moles_of_nitrous_oxide - - tendency_of_atmosphere_moles_of_halon1211 + + sea_water_x_velocity - - atmosphere_moles_of_halon1211 + + sea_water_y_velocity - - tendency_of_atmosphere_moles_of_halon1301 + + integral_wrt_time_of_air_temperature_deficit - - atmosphere_moles_of_halon1301 + + integral_wrt_time_of_air_temperature_excess - - platform_id + + 
integral_wrt_time_of_surface_downward_latent_heat_flux - - mass_flux_of_carbon_into_litter_from_vegetation + + integral_wrt_time_of_surface_downward_sensible_heat_flux - - subsurface_litter_mass_content_of_carbon + + atmosphere_convective_available_potential_energy - - stem_mass_content_of_carbon + + atmosphere_convective_available_potential_energy - - mole_concentration_of_dissolved_inorganic_14C_in_sea_water + + gross_primary_productivity_of_biomass_expressed_as_carbon - - surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon + + net_primary_productivity_of_biomass_expressed_as_carbon - - surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves - - mole_concentration_of_dissolved_inorganic_13C_in_sea_water + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots - - northward_transformed_eulerian_mean_air_velocity + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood - - surface_water_evaporation_flux + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - water_volume_transport_into_sea_water_from_rivers + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission - - stratiform_graupel_flux + + sea_surface_wind_wave_mean_period - - wood_debris_mass_content_of_carbon + + sea_surface_swell_wave_mean_period - - toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - water_flux_into_sea_water_from_rivers + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - integral_wrt_height_of_product_of_northward_wind_and_specific_humidity + + sea_surface_height_above_geoid - - integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity + + sea_surface_height_above_geoid - - 
integral_wrt_depth_of_sea_water_temperature + + sea_floor_depth_below_geoid - - integral_wrt_depth_of_sea_water_temperature + + air_pressure_at_mean_sea_level - - integral_wrt_depth_of_sea_water_temperature + + lagrangian_tendency_of_air_pressure - - integral_wrt_depth_of_sea_water_temperature + + lagrangian_tendency_of_air_pressure - - integral_wrt_depth_of_sea_water_practical_salinity + + mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air - - magnitude_of_sea_ice_displacement + + atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution - - surface_geostrophic_eastward_sea_water_velocity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires - - surface_geostrophic_northward_sea_water_velocity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion - - tendency_of_sea_surface_height_above_mean_sea_level + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport - - surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid + + 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport - - surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition - - sea_surface_height_above_mean_sea_level + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition - - sea_surface_height_above_mean_sea_level + + tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation - - sea_floor_depth_below_mean_sea_level + + integral_wrt_time_of_surface_net_downward_longwave_flux - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + integral_wrt_time_of_surface_net_downward_shortwave_flux - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + integral_wrt_time_of_toa_net_downward_shortwave_flux - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + + integral_wrt_time_of_toa_outgoing_longwave_flux - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles 
+ + northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + ocean_heat_x_transport_due_to_parameterized_eddy_advection - - mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + ocean_heat_y_transport_due_to_parameterized_eddy_advection - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + sea_water_x_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + 
sea_water_y_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission + + mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + northward_ocean_heat_transport_due_to_parameterized_eddy_advection - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + integral_wrt_depth_of_sea_water_practical_salinity - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + integral_wrt_depth_of_sea_water_temperature - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + integral_wrt_depth_of_sea_water_temperature - - 
atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + integral_wrt_depth_of_sea_water_temperature - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + integral_wrt_depth_of_sea_water_temperature - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + integral_wrt_height_of_product_of_northward_wind_and_specific_humidity - - northward_ocean_heat_transport_due_to_parameterized_eddy_advection + + water_flux_into_sea_water_from_rivers - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + wood_debris_mass_content_of_carbon - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + stratiform_graupel_flux - - upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + water_volume_transport_into_sea_water_from_rivers - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + surface_water_evaporation_flux - - sea_water_x_velocity_due_to_parameterized_mesoscale_eddies + + northward_transformed_eulerian_mean_air_velocity - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - 
ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - integral_wrt_time_of_toa_outgoing_longwave_flux + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - integral_wrt_time_of_surface_net_downward_longwave_flux + + 
tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + mass_concentration_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air - - 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + mass_concentration_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + sea_floor_depth_below_mean_sea_level - - mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + sea_surface_height_above_mean_sea_level - - lagrangian_tendency_of_air_pressure + + sea_surface_height_above_mean_sea_level - - lagrangian_tendency_of_air_pressure + + surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - air_pressure_at_mean_sea_level + + surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - sea_floor_depth_below_geoid + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - sea_surface_height_above_geoid + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - sea_surface_height_above_geoid + + surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid - - 
tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + tendency_of_sea_surface_height_above_mean_sea_level - - sea_surface_swell_wave_mean_period + + surface_geostrophic_northward_sea_water_velocity - - sea_surface_wind_wave_mean_period + + surface_geostrophic_eastward_sea_water_velocity - - sea_surface_wave_mean_period + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition - - sea_surface_wind_wave_to_direction + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition - - atmosphere_moles_of_carbon_monoxide + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - tendency_of_atmosphere_moles_of_nitrous_oxide + + mole_concentration_of_dissolved_inorganic_13C_in_sea_water - - tendency_of_atmosphere_moles_of_molecular_hydrogen + + surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C - - tendency_of_atmosphere_moles_of_methyl_chloride + + surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon - - tendency_of_atmosphere_moles_of_methyl_bromide + + mole_concentration_of_dissolved_inorganic_14C_in_sea_water - - y_wind + + stem_mass_content_of_carbon - - x_wind + + subsurface_litter_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition + + mass_flux_of_carbon_into_litter_from_vegetation - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition + + platform_id - - 
tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_moles_of_halon1301 - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_moles_of_halon1301 - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production + + atmosphere_moles_of_halon1211 - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production + + tendency_of_atmosphere_moles_of_halon1211 - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_moles_of_halon1202 - - tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_moles_of_halon1202 - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_moles_of_cfc12 - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_mass_content_of_cloud_liquid_water - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition + + effective_radius_of_cloud_liquid_water_particles - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition + + effective_radius_of_convective_cloud_liquid_water_particles - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling + + effective_radius_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition + + effective_radius_of_stratiform_cloud_liquid_water_particles - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + 
effective_radius_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + magnitude_of_sea_ice_displacement - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air + + air_equivalent_potential_temperature - - number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + + mass_content_of_cloud_liquid_water_in_atmosphere_layer - - number_concentration_of_ambient_aerosol_particles_in_air + + air_pseudo_equivalent_temperature - - mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + air_equivalent_temperature - - mole_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top - - mass_fraction_of_water_in_ambient_aerosol_particles_in_air + + atmosphere_mass_content_of_convective_cloud_liquid_water - - mass_fraction_of_sulfate_dry_aerosol_particles_in_air + + mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water - - mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water - - mass_fraction_of_nitrate_dry_aerosol_particles_in_air + + 
tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water - - mass_fraction_of_dust_dry_aerosol_particles_in_air + + air_pseudo_equivalent_potential_temperature - - mass_fraction_of_ammonium_dry_aerosol_particles_in_air + + growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance - - mass_concentration_of_water_in_ambient_aerosol_particles_in_air + + iron_growth_limitation_of_diazotrophic_phytoplankton - - mass_concentration_of_sulfate_dry_aerosol_particles_in_air + + mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + nitrogen_growth_limitation_of_diazotrophic_phytoplankton - - mass_concentration_of_nitrate_dry_aerosol_particles_in_air + + tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - mass_concentration_of_mercury_dry_aerosol_particles_in_air + + mass_fraction_of_liquid_precipitation_in_air - - atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + mass_fraction_of_liquid_precipitation_in_air - - mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air + + area_type - - mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + area_type - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur + + 
atmosphere_upward_absolute_vorticity - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur + + atmosphere_upward_relative_vorticity - - mass_concentration_of_ammonium_dry_aerosol_particles_in_air + + surface_snow_density - - mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - mass_concentration_of_dust_dry_aerosol_particles_in_air + + mass_fraction_of_mercury_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission - - atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice - - atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + outgoing_water_volume_transport_along_river_channel - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + precipitation_flux_onto_canopy - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + water_evaporation_flux_from_canopy - - atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + water_evaporation_amount_from_canopy - - atmosphere_mass_content_of_water_in_ambient_aerosol_particles + + lwe_stratiform_precipitation_rate - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles + + lwe_thickness_of_stratiform_precipitation_amount - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + stratiform_precipitation_amount - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + stratiform_precipitation_flux - - atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles + + tendency_of_air_temperature_due_to_stratiform_precipitation - - atmosphere_mass_content_of_nitric_acid_trihydrate_ambient_aerosol_particles + + 
tendency_of_specific_humidity_due_to_stratiform_precipitation - - atmosphere_mass_content_of_nitrate_dry_aerosol_particles + + tendency_of_atmosphere_moles_of_methane - - atmosphere_mass_content_of_mercury_dry_aerosol_particles + + mole_fraction_of_noy_expressed_as_nitrogen_in_air - - atmosphere_mass_content_of_dust_dry_aerosol_particles + + volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles - - atmosphere_mass_content_of_ammonium_dry_aerosol_particles + + tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction - - atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles + + tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes - - atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes - - atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms - - angstrom_exponent_of_ambient_aerosol_in_air + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton - - atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + platform_roll - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition + + platform_pitch - - 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_turbulent_deposition + + platform_yaw - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_atmosphere_moles_of_carbon_monoxide - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_moles_of_carbon_tetrachloride - - mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning - - mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air + + water_vapor_saturation_deficit_in_air - - atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles + + mole_fraction_of_inorganic_bromine_in_air - - atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood + + tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves + + 
eastward_transformed_eulerian_mean_air_velocity - - net_primary_productivity_of_biomass_expressed_as_carbon + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration - - gross_primary_productivity_of_biomass_expressed_as_carbon + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil - - atmosphere_convective_available_potential_energy + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration - - atmosphere_convective_available_potential_energy + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance - - integral_wrt_time_of_surface_downward_latent_heat_flux + + carbon_mass_content_of_forestry_and_agricultural_products - - integral_wrt_time_of_air_temperature_excess + + carbon_mass_content_of_forestry_and_agricultural_products - - integral_wrt_time_of_air_temperature_deficit + + leaf_mass_content_of_carbon - - sea_water_y_velocity + + medium_soil_pool_mass_content_of_carbon - - sea_water_x_velocity + + fast_soil_pool_mass_content_of_carbon - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + miscellaneous_living_matter_mass_content_of_carbon - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + root_mass_content_of_carbon - - mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water + + slow_soil_pool_mass_content_of_carbon - - mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water + + soil_mass_content_of_carbon - - atmosphere_moles_of_nitrous_oxide + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles - - atmosphere_moles_of_molecular_hydrogen + + 
volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - atmosphere_moles_of_methyl_chloride + + mole_fraction_of_dichlorine_peroxide_in_air - - atmosphere_moles_of_methyl_bromide + + mole_fraction_of_methylglyoxal_in_air - - atmosphere_moles_of_methane + + atmosphere_moles_of_carbon_tetrachloride - - equivalent_thickness_at_stp_of_atmosphere_ozone_content + + floating_ice_shelf_area_fraction - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + product_of_northward_wind_and_lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + atmosphere_moles_of_hcfc22 - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + tendency_of_atmosphere_moles_of_hcfc22 - - sea_surface_swell_wave_to_direction + + tendency_of_troposphere_moles_of_hcfc22 - - mass_content_of_water_in_soil + + tendency_of_middle_atmosphere_moles_of_hcc140a - - mass_content_of_water_in_soil_layer + + tendency_of_troposphere_moles_of_hcc140a - - sea_surface_wave_significant_height + + atmosphere_moles_of_hcc140a - - sea_surface_wind_wave_significant_height + + tendency_of_atmosphere_moles_of_hcc140a - - sea_surface_swell_wave_significant_height + + atmosphere_moles_of_halon2402 - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles + + upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - mass_concentration_of_chlorophyll_in_sea_water + + 
surface_upwelling_longwave_flux_in_air_assuming_clear_sky - - mass_concentration_of_chlorophyll_in_sea_water + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky - - omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water + + downwelling_photon_flux_per_unit_wavelength_in_sea_water - - isotropic_radiance_per_unit_wavelength_in_air + + downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - isotropic_radiance_per_unit_wavelength_in_air + + downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - land_ice_lwe_surface_specific_mass_balance_rate + + downwelling_radiance_per_unit_wavelength_in_air - - land_ice_surface_specific_mass_balance_rate + + downwelling_radiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion + + downwelling_radiative_flux_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires + + downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission + + downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission + + downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport + + integral_wrt_time_of_surface_downwelling_longwave_flux_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution + + 
integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning + + surface_downwelling_longwave_flux_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport + + surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission + + surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires + + surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal + + surface_downwelling_radiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion + + surface_downwelling_radiative_flux_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission + + surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission + + surface_downwelling_shortwave_flux_in_air - - tendency_of_troposphere_moles_of_molecular_hydrogen + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky - - tendency_of_troposphere_moles_of_methyl_chloride + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - tendency_of_troposphere_moles_of_methyl_bromide + + 
surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_troposphere_moles_of_methane + + sea_ice_temperature_expressed_as_heat_content - - tendency_of_troposphere_moles_of_carbon_monoxide + + sea_ice_temperature_expressed_as_heat_content - - tendency_of_middle_atmosphere_moles_of_molecular_hydrogen + + sea_water_potential_temperature_expressed_as_heat_content - - tendency_of_middle_atmosphere_moles_of_methyl_chloride + + sea_water_potential_temperature_expressed_as_heat_content - - tendency_of_middle_atmosphere_moles_of_methyl_bromide + + incoming_water_volume_transport_along_river_channel - - tendency_of_middle_atmosphere_moles_of_methane + + surface_upwelling_longwave_flux_in_air - - tendency_of_middle_atmosphere_moles_of_carbon_monoxide + + surface_upwelling_radiance_per_unit_wavelength_in_air - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection + + surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer + + surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence + + stratiform_cloud_area_fraction - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_deep_convection + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_convection + + litter_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_water_vapor + + surface_litter_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_water_due_to_advection + + 
tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_dianeutral_mixing - - mass_content_of_water_vapor_in_atmosphere_layer + + product_of_lagrangian_tendency_of_air_pressure_and_air_temperature - - mass_content_of_water_in_atmosphere_layer + + product_of_lagrangian_tendency_of_air_pressure_and_air_temperature - - mass_content_of_cloud_ice_in_atmosphere_layer + + product_of_lagrangian_tendency_of_air_pressure_and_geopotential_height - - mass_content_of_cloud_condensed_water_in_atmosphere_layer + + product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity - - lwe_thickness_of_atmosphere_mass_content_of_water_vapor + + product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + volume_fraction_of_condensed_water_in_soil - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + volume_fraction_of_condensed_water_in_soil_at_critical_point - - atmosphere_mass_content_of_sulfate + + volume_fraction_of_condensed_water_in_soil_at_field_capacity - - atmosphere_mass_content_of_sulfate + + volume_fraction_of_condensed_water_in_soil_at_wilting_point - - surface_upward_mole_flux_of_carbon_dioxide + + integral_wrt_depth_of_product_of_potential_temperature_and_sea_water_density - - surface_downward_mole_flux_of_carbon_dioxide + + integral_wrt_depth_of_product_of_conservative_temperature_and_sea_water_density - - atmosphere_mass_content_of_water_vapor + + integral_wrt_depth_of_product_of_salinity_and_sea_water_density - - atmosphere_mass_content_of_convective_cloud_condensed_water + + sea_water_velocity_to_direction - - atmosphere_mass_content_of_cloud_ice + + sea_water_velocity_to_direction - - atmosphere_mass_content_of_cloud_condensed_water + + sea_water_velocity_from_direction - - thickness_of_stratiform_snowfall_amount + + integral_wrt_time_of_surface_downward_northward_stress - - thickness_of_stratiform_rainfall_amount + + 
integral_wrt_time_of_surface_downward_eastward_stress - - stratiform_snowfall_flux + + temperature_in_surface_snow - - stratiform_snowfall_amount + + thermal_energy_content_of_surface_snow - - stratiform_rainfall_rate + + surface_snow_thickness - - stratiform_rainfall_flux + + liquid_water_content_of_surface_snow - - stratiform_rainfall_amount + + soot_content_of_surface_snow - - northward_water_vapor_flux_in_air + + backscattering_ratio_in_air - - lwe_thickness_of_stratiform_snowfall_amount + + histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid - - lwe_stratiform_snowfall_rate + + effective_radius_of_convective_cloud_ice_particles - - kinetic_energy_dissipation_in_atmosphere_boundary_layer + + effective_radius_of_convective_cloud_rain_particles - - eastward_water_vapor_flux_in_air + + effective_radius_of_convective_cloud_snow_particles - - upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + effective_radius_of_stratiform_cloud_graupel_particles - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + effective_radius_of_stratiform_cloud_ice_particles - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves + + effective_radius_of_stratiform_cloud_rain_particles - - upward_eliassen_palm_flux_in_air + + mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air - - northward_heat_flux_in_air_due_to_eddy_advection + + diameter_of_ambient_aerosol_particles - - northward_eliassen_palm_flux_in_air + + electrical_mobility_diameter_of_ambient_aerosol_particles - - wave_frequency + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection + + tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition - - 
tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection + + biological_taxon_lsid - - atmosphere_net_upward_convective_mass_flux + + temperature_in_ground - - mass_fraction_of_ozone_in_air + + tendency_of_atmosphere_moles_of_halon2402 - - mass_fraction_of_convective_cloud_condensed_water_in_air + + tendency_of_atmosphere_moles_of_cfc12 - - sea_surface_wind_wave_period + + atmosphere_moles_of_cfc115 - - sea_surface_swell_wave_period + + tendency_of_atmosphere_moles_of_cfc115 - - mass_concentration_of_suspended_matter_in_sea_water + + atmosphere_moles_of_cfc114 - - surface_upward_sensible_heat_flux + + tendency_of_atmosphere_moles_of_cfc114 - - surface_temperature + + atmosphere_moles_of_cfc113 - - surface_temperature + + tendency_of_atmosphere_moles_of_cfc113 - - surface_temperature + + atmosphere_moles_of_cfc11 - - surface_net_downward_radiative_flux + + moles_of_cfc11_per_unit_mass_in_sea_water - - mole_fraction_of_hypochlorous_acid_in_air + + tendency_of_atmosphere_moles_of_cfc11 - - mole_fraction_of_chlorine_monoxide_in_air + + effective_radius_of_stratiform_cloud_snow_particles - - mole_fraction_of_chlorine_dioxide_in_air + + water_vapor_partial_pressure_in_air - - wind_mixing_energy_flux_into_sea_water + + platform_name - - water_flux_into_sea_water + + surface_upwelling_radiance_per_unit_wavelength_in_sea_water - - surface_drag_coefficient_in_air + + surface_upwelling_radiative_flux_per_unit_wavelength_in_air - - surface_drag_coefficient_for_momentum_in_air + + surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - surface_drag_coefficient_for_heat_in_air + + surface_upwelling_shortwave_flux_in_air - - specific_gravitational_potential_energy + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - radiation_wavelength + + upwelling_radiance_per_unit_wavelength_in_air - - product_of_northward_wind_and_specific_humidity + + upwelling_radiative_flux_per_unit_wavelength_in_air - - 
mole_fraction_of_ozone_in_air + + upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - water_evapotranspiration_flux + + mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water - - isotropic_shortwave_radiance_in_air + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton - - isotropic_longwave_radiance_in_air + + moles_of_particulate_inorganic_carbon_per_unit_mass_in_sea_water diff --git a/lib/iris/tests/test_std_names.py b/lib/iris/tests/test_std_names.py index 2093d14bf8..48d32acbee 100644 --- a/lib/iris/tests/test_std_names.py +++ b/lib/iris/tests/test_std_names.py @@ -7,7 +7,7 @@ # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip -from iris.std_names import STD_NAMES +from iris.std_names import CF_STANDARD_NAMES_TABLE_VERSION, STD_NAMES class TestStandardNames(tests.IrisTest): @@ -18,7 +18,7 @@ class TestStandardNames(tests.IrisTest): longMessage = True - def test_standard_names(self): + def test_standard_names_table(self): # Check we have a dict self.assertIsInstance(STD_NAMES, dict) @@ -46,6 +46,12 @@ def test_standard_names(self): "\nInvalid standard name(s) present in STD_NAMES", ) + def test_standard_names_version(self): + # Check we have a dict + self.assertIsInstance(CF_STANDARD_NAMES_TABLE_VERSION, int) + # Check the value is roughly sensible. + self.assertTrue(70 < CF_STANDARD_NAMES_TABLE_VERSION < 999) + if __name__ == "__main__": tests.main() diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 08bacbe1e0..51a31ef971 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -37,6 +37,8 @@ This file is automatically generated. Do not edit this file by hand. 
+Generated from CF standard-name table version : {table_version} + The file will be generated during a standard build/installation:: python setup.py build @@ -76,6 +78,8 @@ def to_dict(infile, outfile): tree = ET.parse(infile) + cf_table_version_string = tree.find('version_number').text + for section in process_name_table(tree, 'entry', 'canonical_units'): values.update(section) @@ -87,7 +91,10 @@ def to_dict(infile, outfile): key : {'canonical_units' : values.get(valued['entry_id']).get('canonical_units')} }) - outfile.write(STD_VALUES_FILE_TEMPLATE + pprint.pformat(values)) + text = STD_VALUES_FILE_TEMPLATE.format(table_version=cf_table_version_string) + text += pprint.pformat(values) + text += f'\n\nCF_STANDARD_NAMES_TABLE_VERSION = {cf_table_version_string}\n' + outfile.write(text) if __name__ == "__main__": From 75ff780642423c6617c90ea7476a545adbca08d8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 16 Aug 2023 16:14:01 +0100 Subject: [PATCH 032/134] V3.7.x.updates (#5424) * Whats new updates for v3.7.0rc0 . --- docs/src/whatsnew/{latest.rst => 3.7.rst} | 54 ++++++++--- docs/src/whatsnew/index.rst | 4 +- docs/src/whatsnew/latest.rst.template | 107 ---------------------- 3 files changed, 45 insertions(+), 120 deletions(-) rename docs/src/whatsnew/{latest.rst => 3.7.rst} (71%) delete mode 100644 docs/src/whatsnew/latest.rst.template diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/3.7.rst similarity index 71% rename from docs/src/whatsnew/latest.rst rename to docs/src/whatsnew/3.7.rst index b500029789..237e2873cb 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/3.7.rst @@ -1,23 +1,37 @@ .. include:: ../common_links.inc -|iris_version| |build_date| [unreleased] -**************************************** +v3.7 (16 Aug 2023) [release candidate] +************************************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: |iris_version| Release Highlights +.. 
dropdown:: v3.7 Release Highlights :color: primary :icon: info :animate: fade-in :open: - The highlights for this major/minor release of Iris include: + There are no major feature highlights for this release of Iris, but it's worth + noting that, in addition to some important bug fixes in specific areas, this time + we have made a number of improvements for user-experience and usability, + notably : - * N/A + * improved messaging for :ref:`CubeList.concatenate() ` + and :ref:`Cube.convert_units() `. - And finally, get in touch with us on :issue:`GitHub` if you have + * avoid warnings which may occur in :ref:`pp loading ` + and :ref:`contourf `. + + * :ref:`documentation supports Dark mode `. + + * :ref:`added a "Dask Best Practices" guide ` + ( :ref:`here ` ) . + + * :ref:`improved the Installation Guide `. + + Please do get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! @@ -33,8 +47,11 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) -#. `@acchamber`_ added error and warning messages about coordinate overlaps to - :func:`~iris.cube.concatenate` to improve the concatenation process. (:pull:`5382`) +.. _concat_warnings: + +#. `@acchamber`_ added error and warning messages about coordinate overlaps to + :func:`~iris.cube.CubeList.concatenate` to improve the concatenation process. + (:pull:`5382`) #. `@trexfeathers`_ included mesh location coordinates (e.g. :attr:`~iris.experimental.ugrid.Mesh.face_coords`) in @@ -57,9 +74,13 @@ This document explains the changes made to Iris for this release #. `@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) +.. _cftime_warnings: + #. `@rcomer`_ modified pp-loading to avoid a ``cftime`` warning for non-standard calendars. (:pull:`5357`) +#. 
`@rsdavies`_ modified the CF compliant standard name for m01s00i023 (:issue:`4566`) + 💣 Incompatible Changes ======================= @@ -71,7 +92,6 @@ This document explains the changes made to Iris for this release =========================== #. `@rcomer`_ made :meth:`~iris.cube.Cube.aggregated_by` faster. (:pull:`4970`) -#. `@rsdavies`_ modified the CF compliant standard name for m01s00i023 :issue:`4566` 🔥 Deprecations =============== @@ -88,17 +108,27 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ +.. _docs_dark: + #. `@tkknight`_ prepared the documentation for dark mode and enable the option to use it. By default the theme will be based on the users system settings, defaulting to ``light`` if no system setting is found. (:pull:`5299`) +.. _dask_guide: + #. `@HGWright`_ added a :doc:`/further_topics/dask_best_practices/index` section into the user guide, containing advice and use cases to help users - get the best out of Dask with Iris. + get the best out of Dask with Iris. (:pull:`5190`) + +.. _convert_docs: #. `@acchamber`_ improved documentation for :meth:`~iris.cube.Cube.convert_units` and :meth:`~iris.coords.Coord.convert_units` by including a link to the UDUNITS-2 documentation which contains lists of compatible units and aliases for them. + (:pull:`5388`) + + +.. _installdocs_update: #. `@rcomer`_ updated the :ref:`Installation Guide` to reflect that some things are now simpler. (:pull:`5416`) @@ -109,7 +139,9 @@ This document explains the changes made to Iris for this release #. `@pp-mo`_ supported loading and saving netcdf :class:`netCDF4.Dataset` compatible objects in place of file-paths, as hooks for a forthcoming `"Xarray bridge" `_ facility. - (:pull:`5214`) + (:pull:`5214`, :pull:`5212`) + +.. _contour_future: #. 
`@rcomer`_ updated :func:`~iris.plot.contourf` to avoid using functionality that is deprecated in Matplotlib v3.8 (:pull:`5405`) diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index dce7458a13..d2a15be1f7 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -5,13 +5,13 @@ What's New in Iris ------------------ -.. include:: latest.rst +.. include:: 3.7.rst .. toctree:: :maxdepth: 1 :hidden: - latest.rst + 3.7.rst 3.6.rst 3.5.rst 3.4.rst diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template deleted file mode 100644 index 966a91e976..0000000000 --- a/docs/src/whatsnew/latest.rst.template +++ /dev/null @@ -1,107 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. dropdown:: |iris_version| Release Highlights - :color: primary - :icon: info - :animate: fade-in - :open: - - The highlights for this major/minor release of Iris include: - - * N/A - - And finally, get in touch with us on :issue:`GitHub` if you have - any issues or feature requests for improving Iris. Enjoy! - - -NOTE: section BELOW is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst' - -|iris_version| |build_date| -=========================== - -.. dropdown:: |iris_version| Patches - :color: primary - :icon: alert - :animate: fade-in - - The patches in this release of Iris include: - - #. N/A - -NOTE: section ABOVE is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst') - - -📢 Announcements -================ - -#. N/A - - -✨ Features -=========== - -#. N/A - - -🐛 Bugs Fixed -============= - -#. 
N/A - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. N/A - - -🔥 Deprecations -=============== - -#. N/A - - -🔗 Dependencies -=============== - -#. N/A - - -📚 Documentation -================ - -#. N/A - - -💼 Internal -=========== - -#. N/A - - -.. comment - Whatsnew author names (@github name) in alphabetical order. Note that, - core dev names are automatically included by the common_links.inc: - - - - -.. comment - Whatsnew resources in alphabetical order: From fccf9e603b694f3cf682028a3f3b35402422bd16 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 18 Aug 2023 11:18:04 +0100 Subject: [PATCH 033/134] Tweak indents to fix list numbering. (#5427) --- docs/src/whatsnew/3.7.rst | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index 237e2873cb..d5ea21f3d7 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -47,7 +47,7 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) -.. _concat_warnings: + .. _concat_warnings: #. `@acchamber`_ added error and warning messages about coordinate overlaps to :func:`~iris.cube.CubeList.concatenate` to improve the concatenation process. @@ -74,7 +74,7 @@ This document explains the changes made to Iris for this release #. `@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) -.. _cftime_warnings: + .. _cftime_warnings: #. `@rcomer`_ modified pp-loading to avoid a ``cftime`` warning for non-standard calendars. (:pull:`5357`) @@ -104,35 +104,33 @@ This document explains the changes made to Iris for this release #. N/A - 📚 Documentation ================ - .. _docs_dark: #. `@tkknight`_ prepared the documentation for dark mode and enable the option to use it. 
By default the theme will be based on the users system settings, defaulting to ``light`` if no system setting is found. (:pull:`5299`) -.. _dask_guide: + .. _dask_guide: #. `@HGWright`_ added a :doc:`/further_topics/dask_best_practices/index` section into the user guide, containing advice and use cases to help users get the best out of Dask with Iris. (:pull:`5190`) -.. _convert_docs: + .. _convert_docs: #. `@acchamber`_ improved documentation for :meth:`~iris.cube.Cube.convert_units` and :meth:`~iris.coords.Coord.convert_units` by including a link to the UDUNITS-2 documentation which contains lists of compatible units and aliases for them. (:pull:`5388`) - -.. _installdocs_update: + .. _installdocs_update: #. `@rcomer`_ updated the :ref:`Installation Guide` to reflect that some things are now simpler. (:pull:`5416`) + 💼 Internal =========== @@ -141,7 +139,7 @@ This document explains the changes made to Iris for this release `"Xarray bridge" `_ facility. (:pull:`5214`, :pull:`5212`) -.. _contour_future: + .. _contour_future: #. `@rcomer`_ updated :func:`~iris.plot.contourf` to avoid using functionality that is deprecated in Matplotlib v3.8 (:pull:`5405`) From f21f4c47fe358c64fce2533550168e157381860b Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 17 Aug 2023 17:51:05 +0100 Subject: [PATCH 034/134] Restore latest Whats New files. --- docs/src/whatsnew/index.rst | 3 +- docs/src/whatsnew/latest.rst | 85 ++++++++++++++++++++ docs/src/whatsnew/latest.rst.template | 107 ++++++++++++++++++++++++++ 3 files changed, 194 insertions(+), 1 deletion(-) create mode 100644 docs/src/whatsnew/latest.rst create mode 100644 docs/src/whatsnew/latest.rst.template diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index d2a15be1f7..c556f82761 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -5,12 +5,13 @@ What's New in Iris ------------------ -.. include:: 3.7.rst +.. include:: latest.rst .. 
toctree:: :maxdepth: 1 :hidden: + latest.rst 3.7.rst 3.6.rst 3.5.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst new file mode 100644 index 0000000000..72b74bdf42 --- /dev/null +++ b/docs/src/whatsnew/latest.rst @@ -0,0 +1,85 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: |iris_version| Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * N/A + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. N/A + + +✨ Features +=========== + +#. N/A + + +🐛 Bugs Fixed +============= + +#. N/A + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. N/A + + +📚 Documentation +================ + +#. N/A + + +💼 Internal +=========== + +#. N/A + + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + + +.. comment + Whatsnew resources in alphabetical order: diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template new file mode 100644 index 0000000000..966a91e976 --- /dev/null +++ b/docs/src/whatsnew/latest.rst.template @@ -0,0 +1,107 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. 
dropdown:: |iris_version| Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * N/A + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +NOTE: section BELOW is a template for bugfix patches +==================================================== + (Please remove this section when creating an initial 'latest.rst' + +|iris_version| |build_date| +=========================== + +.. dropdown:: |iris_version| Patches + :color: primary + :icon: alert + :animate: fade-in + + The patches in this release of Iris include: + + #. N/A + +NOTE: section ABOVE is a template for bugfix patches +==================================================== + (Please remove this section when creating an initial 'latest.rst') + + +📢 Announcements +================ + +#. N/A + + +✨ Features +=========== + +#. N/A + + +🐛 Bugs Fixed +============= + +#. N/A + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. N/A + + +📚 Documentation +================ + +#. N/A + + +💼 Internal +=========== + +#. N/A + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + + + + +.. comment + Whatsnew resources in alphabetical order: From ff897eb3050452218b7dd27e911d7c5f1678c052 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 21 Aug 2023 09:46:55 +0100 Subject: [PATCH 035/134] Merge the benchmark fixes and enhancements to `main` (#5437) * Improve benchmark runner printing (#5429) * More sensible print and run functions. * Avoid permanent modifications in _subprocess_runner. 
* Post on demand benchmark results as comment (big refactor) (#5430) * On demand benchmarking. * Correct gh query. * Correct assignee spacing. * What's new entry. * Better comparison commits for PR benchmarking (#5431) * Don't check out head_ref - benchmark the GH simulated merge commit instead. * What's New entry. * Warn via issue if overnight benchmarks fail (#5432) * Include a warning step for overnight benchmarking. * Fix for failure warning script. * Better formatting of warning issue title. * What's new entry. * Minor benchmark improvements (#5433) * Use shlex.split() for bm_runner commands. * Minor documentation clarifications. * Set benchmark runs to error if the subprocess errors (#5434) * Set benchmark runs to error if the subprocess errors. * Still compare results even from a broken run. * Still upload reports if overnight run fails. * What's New entry. * Hard-code conda channel into asv_delegated_conda.py (#5435) * What's new entry. * What's New entry. * Hard-code conda channel into asv_delegated_conda.py . * Fix some rebase confusion in the What's New. * Inflate benchmark data to ensure laziness (#5436) * Inflate benchmark data to ensure laziness. * What's New entry. * Benchmark feature branch what's new entry (#5438) * What's new entry. * Correct user name @ESadek-MO. * Missing colon. 
--- .github/workflows/benchmarks_report.yml | 83 ++++ .../{benchmark.yml => benchmarks_run.yml} | 76 ++-- .gitignore | 1 + benchmarks/asv.conf.json | 1 - benchmarks/asv_delegated_conda.py | 4 + .../experimental/ugrid/regions_combine.py | 2 +- benchmarks/benchmarks/load/__init__.py | 2 +- benchmarks/benchmarks/load/ugrid.py | 2 +- benchmarks/benchmarks/save.py | 2 +- benchmarks/bm_runner.py | 362 ++++++++++++++---- docs/src/whatsnew/latest.rst | 7 +- 11 files changed, 408 insertions(+), 134 deletions(-) create mode 100644 .github/workflows/benchmarks_report.yml rename .github/workflows/{benchmark.yml => benchmarks_run.yml} (57%) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml new file mode 100644 index 0000000000..cffa1b1ef4 --- /dev/null +++ b/.github/workflows/benchmarks_report.yml @@ -0,0 +1,83 @@ +# Post any reports generated by benchmarks_run.yml . +# Separated for security: +# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ + +name: benchmarks-report +run-name: Report benchmark results + +on: + workflow_run: + workflows: [benchmarks-run] + types: + - completed + +jobs: + download: + runs-on: ubuntu-latest + outputs: + reports_exist: ${{ steps.unzip.outputs.reports_exist }} + steps: + - name: Download artifact + id: download-artifact + # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#using-data-from-the-triggering-workflow + uses: actions/github-script@v6 + with: + script: | + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: context.payload.workflow_run.id, + }); + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name == "benchmark_reports" + })[0]; + if (typeof matchArtifact != 'undefined') { + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: 
matchArtifact.id, + archive_format: 'zip', + }); + let fs = require('fs'); + fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/benchmark_reports.zip`, Buffer.from(download.data)); + }; + + - name: Unzip artifact + id: unzip + run: | + if test -f "benchmark_reports.zip"; then + reports_exist=1 + unzip benchmark_reports.zip -d benchmark_reports + else + reports_exist=0 + fi + echo "reports_exist=$reports_exist" >> "$GITHUB_OUTPUT" + + - name: Store artifact + uses: actions/upload-artifact@v3 + with: + name: benchmark_reports + path: benchmark_reports + + post_reports: + runs-on: ubuntu-latest + needs: download + if: needs.download.outputs.reports_exist == 1 + steps: + - name: Checkout repo + uses: actions/checkout@v3 + + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: benchmark_reports + path: .github/workflows/benchmark_reports + + - name: Set up Python + # benchmarks/bm_runner.py only needs builtins to run. + uses: actions/setup-python@v3 + + - name: Post reports + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: python benchmarks/bm_runner.py _gh_post diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmarks_run.yml similarity index 57% rename from .github/workflows/benchmark.yml rename to .github/workflows/benchmarks_run.yml index 5be56c1d80..a39c531a77 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmarks_run.yml @@ -1,6 +1,9 @@ -# Use ASV to check for performance regressions in the last 24 hours' commits. +# Use ASV to check for performance regressions, either: +# - In the last 24 hours' commits. +# - Introduced by this pull request. -name: benchmark-check +name: benchmarks-run +run-name: Run benchmarks on: schedule: @@ -9,7 +12,7 @@ on: workflow_dispatch: inputs: first_commit: - description: "Argument to be passed to the overnight benchmark script." + description: "First commit to benchmark (see bm_runner.py > Overnight)." 
required: false type: string pull_request: @@ -74,12 +77,17 @@ jobs: - name: Benchmark this pull request if: ${{ github.event.label.name == 'benchmark_this' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.number }} run: | - git checkout ${{ github.head_ref }} python benchmarks/bm_runner.py branch origin/${{ github.base_ref }} - name: Run overnight benchmarks + id: overnight if: ${{ github.event_name != 'pull_request' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | first_commit=${{ inputs.first_commit }} if [ "$first_commit" == "" ] @@ -92,57 +100,27 @@ jobs: python benchmarks/bm_runner.py overnight $first_commit fi - - name: Create issues for performance shifts - if: ${{ github.event_name != 'pull_request' }} + - name: Warn of failure + if: > + failure() && + steps.overnight.outcome == 'failure' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - if [ -d benchmarks/.asv/performance-shifts ] - then - cd benchmarks/.asv/performance-shifts - for commit_file in * - do - commit="${commit_file%.*}" - pr_number=$(git log "$commit"^! --oneline | grep -o "#[0-9]*" | tail -1 | cut -c 2-) - author=$(gh pr view $pr_number --json author -q '.["author"]["login"]' --repo $GITHUB_REPOSITORY) - merger=$(gh pr view $pr_number --json mergedBy -q '.["mergedBy"]["login"]' --repo $GITHUB_REPOSITORY) - # Find a valid assignee from author/merger/nothing. - if curl -s https://api.github.com/users/$author | grep -q '"type": "User"'; then - assignee=$author - elif curl -s https://api.github.com/users/$merger | grep -q '"type": "User"'; then - assignee=$merger - else - assignee="" - fi - title="Performance Shift(s): \`$commit\`" - body=" - Benchmark comparison has identified performance shifts at - - * commit $commit (#$pr_number). 
- - Please review the report below and \ - take corrective/congratulatory action as appropriate \ - :slightly_smiling_face: + title="Overnight benchmark workflow failed: \`${{ github.run_id }}\`" + body="Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" + gh issue create --title "$title" --body "$body" --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY -
- Performance shift report - - \`\`\` - $(cat $commit_file) - \`\`\` - -
- - Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}) - " - gh issue create --title "$title" --body "$body" --assignee $assignee --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY - done - fi + - name: Upload any benchmark reports + if: success() || steps.overnight.outcome == 'failure' + uses: actions/upload-artifact@v3 + with: + name: benchmark_reports + path: .github/workflows/benchmark_reports - name: Archive asv results if: ${{ always() }} uses: actions/upload-artifact@v3 with: - name: asv-report - path: | - benchmarks/.asv/results + name: asv-raw-results + path: benchmarks/.asv/results diff --git a/.gitignore b/.gitignore index 4d0b474e8a..42d02d8c71 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ pip-cache # asv data, environments, results .asv benchmarks/.data +.github/workflows/benchmark_reports #Translations *.mo diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index faa7f6daee..fab5bcb44e 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -4,7 +4,6 @@ "project_url": "https://github.com/SciTools/iris", "repo": "..", "environment_type": "conda-delegated", - "conda_channels": ["conda-forge", "defaults"], "show_commit_url": "http://github.com/scitools/iris/commit/", "branches": ["upstream/main"], diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index 250a4e032d..22a3110075 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -66,6 +66,8 @@ def __init__( ignored.append("`requirements`") if tagged_env_vars: ignored.append("`tagged_env_vars`") + if conf.conda_channels: + ignored.append("conda_channels") if conf.conda_environment_file: ignored.append("`conda_environment_file`") message = ( @@ -75,6 +77,8 @@ def __init__( log.warning(message) requirements = {} tagged_env_vars = {} + # All that is required to create ASV's bare-bones environment. 
+ conf.conda_channels = ["defaults"] conf.conda_environment_file = None super().__init__(conf, python, requirements, tagged_env_vars) diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index c5f8fb564e..16044c663a 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -30,7 +30,7 @@ class MixinCombineRegions: # Characterise time taken + memory-allocated, for various stages of combine # operations on cubesphere-like test data. - params = [4, 500] + params = [50, 500] param_names = ["cubesphere-N"] def _parametrised_cache_filename(self, n_cubesphere, content_name): diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index 1b0ea696f6..3b2a83b1b1 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -27,7 +27,7 @@ class LoadAndRealise: # For data generation timeout = 600.0 params = [ - [(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], + [(50, 50, 2), (1280, 960, 5), (2, 2, 1000)], [False, True], ["FF", "PP", "NetCDF"], ] diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py index 350a78e128..35c8754171 100644 --- a/benchmarks/benchmarks/load/ugrid.py +++ b/benchmarks/benchmarks/load/ugrid.py @@ -77,7 +77,7 @@ class DataRealisation: warmup_time = 0.0 timeout = 300.0 - params = [1, int(2e5)] + params = [int(1e4), int(2e5)] param_names = ["number of faces"] def setup_common(self, **kwargs): diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py index 3551c72528..d00c66a0ca 100644 --- a/benchmarks/benchmarks/save.py +++ b/benchmarks/benchmarks/save.py @@ -21,7 +21,7 @@ class NetcdfSave: - params = [[1, 600], [False, True]] + params = [[50, 600], [False, True]] param_names = ["cubesphere-N", "is_unstructured"] def setup(self, n_cubesphere, is_unstructured): diff --git 
a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index f3efb0ea31..b3145fbdf1 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -15,8 +15,10 @@ from os import environ from pathlib import Path import re +import shlex import subprocess from tempfile import NamedTemporaryFile +from textwrap import dedent from typing import Literal # The threshold beyond which shifts are 'notable'. See `asv compare`` docs @@ -24,6 +26,9 @@ COMPARE_FACTOR = 1.2 BENCHMARKS_DIR = Path(__file__).parent +ROOT_DIR = BENCHMARKS_DIR.parent +# Storage location for reports used in GitHub actions. +GH_REPORT_DIR = ROOT_DIR.joinpath(".github", "workflows", "benchmark_reports") # Common ASV arguments for all run_types except `custom`. ASV_HARNESS = ( @@ -32,17 +37,27 @@ ) -def _subprocess_run_print(args, **kwargs): +def echo(echo_string: str): # Use subprocess for printing to reduce chance of printing out of sequence # with the subsequent calls. - subprocess.run(["echo", f"BM_RUNNER DEBUG: {' '.join(args)}"]) + subprocess.run(["echo", f"BM_RUNNER DEBUG: {echo_string}"]) + + +def _subprocess_runner(args, asv=False, **kwargs): + # Avoid permanent modifications if the same arguments are used more than once. + args = args.copy() + kwargs = kwargs.copy() + if asv: + args.insert(0, "asv") + kwargs["cwd"] = BENCHMARKS_DIR + echo(" ".join(args)) + kwargs.setdefault("check", True) return subprocess.run(args, **kwargs) -def _subprocess_run_asv(args, **kwargs): - args.insert(0, "asv") - kwargs["cwd"] = BENCHMARKS_DIR - return _subprocess_run_print(args, **kwargs) +def _subprocess_runner_capture(args, **kwargs) -> str: + result = _subprocess_runner(args, capture_output=True, **kwargs) + return result.stdout.decode().rstrip() def _check_requirements(package: str) -> None: @@ -61,19 +76,18 @@ def _prep_data_gen_env() -> None: Create/access a separate, unchanging environment for generating test data. 
""" - root_dir = BENCHMARKS_DIR.parent python_version = "3.11" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: - print("Using existing data generation environment.") + echo("Using existing data generation environment.") else: - print("Setting up the data generation environment ...") + echo("Setting up the data generation environment ...") # Get Nox to build an environment for the `tests` session, but don't # run the session. Will re-use a cached environment if appropriate. - _subprocess_run_print( + _subprocess_runner( [ "nox", - f"--noxfile={root_dir / 'noxfile.py'}", + f"--noxfile={ROOT_DIR / 'noxfile.py'}", "--session=tests", "--install-only", f"--python={python_version}", @@ -82,14 +96,14 @@ def _prep_data_gen_env() -> None: # Find the environment built above, set it to be the data generation # environment. data_gen_python = next( - (root_dir / ".nox").rglob(f"tests*/bin/python{python_version}") + (ROOT_DIR / ".nox").rglob(f"tests*/bin/python{python_version}") ).resolve() environ[data_gen_var] = str(data_gen_python) - print("Installing Mule into data generation environment ...") + echo("Installing Mule into data generation environment ...") mule_dir = data_gen_python.parents[1] / "resources" / "mule" if not mule_dir.is_dir(): - _subprocess_run_print( + _subprocess_runner( [ "git", "clone", @@ -97,7 +111,7 @@ def _prep_data_gen_env() -> None: str(mule_dir), ] ) - _subprocess_run_print( + _subprocess_runner( [ str(data_gen_python), "-m", @@ -107,7 +121,7 @@ def _prep_data_gen_env() -> None: ] ) - print("Data generation environment ready.") + echo("Data generation environment ready.") def _setup_common() -> None: @@ -116,41 +130,192 @@ def _setup_common() -> None: _prep_data_gen_env() - print("Setting up ASV ...") - _subprocess_run_asv(["machine", "--yes"]) + echo("Setting up ASV ...") + _subprocess_runner(["machine", "--yes"], asv=True) - print("Setup complete.") + echo("Setup complete.") def _asv_compare(*commits: str, overnight_mode: bool = False) 
-> None: - """Run through a list of commits comparing each one to the next.""" + """ + Run through a list of commits comparing each one to the next. + """ commits = [commit[:8] for commit in commits] - shifts_dir = BENCHMARKS_DIR / ".asv" / "performance-shifts" for i in range(len(commits) - 1): before = commits[i] after = commits[i + 1] - asv_command = ( + asv_command = shlex.split( f"compare {before} {after} --factor={COMPARE_FACTOR} --split" ) - _subprocess_run_asv(asv_command.split(" ")) - - if overnight_mode: - # Record performance shifts. - # Run the command again but limited to only showing performance - # shifts. - shifts = _subprocess_run_asv( - [*asv_command.split(" "), "--only-changed"], - capture_output=True, - text=True, - ).stdout - if shifts: - # Write the shifts report to a file. - # Dir is used by .github/workflows/benchmarks.yml, - # but not cached - intended to be discarded after run. - shifts_dir.mkdir(exist_ok=True, parents=True) - shifts_path = (shifts_dir / after).with_suffix(".txt") - with shifts_path.open("w") as shifts_file: - shifts_file.write(shifts) + + comparison = _subprocess_runner_capture(asv_command, asv=True) + echo(comparison) + shifts = _subprocess_runner_capture( + [*asv_command, "--only-changed"], asv=True + ) + + if shifts or (not overnight_mode): + # For the overnight run: only post if there are shifts. + _gh_create_reports(after, comparison, shifts) + + +def _gh_create_reports( + commit_sha: str, results_full: str, results_shifts: str +) -> None: + """ + If running under GitHub Actions: record the results in report(s). + + Posting the reports is done by :func:`_gh_post_reports`, which must be run + within a separate action to comply with GHA's security limitations. + """ + if "GITHUB_ACTIONS" not in environ: + # Only run when within GHA. 
+ return + + pr_number = environ.get("PR_NUMBER", None) + on_pull_request = pr_number is not None + run_id = environ["GITHUB_RUN_ID"] + repo = environ["GITHUB_REPOSITORY"] + gha_run_link = ( + f"[`{run_id}`](https://github.com/{repo}/actions/runs/{run_id})" + ) + + GH_REPORT_DIR.mkdir(exist_ok=True) + commit_dir = GH_REPORT_DIR / commit_sha + commit_dir.mkdir() + command_path = commit_dir / "command.txt" + body_path = commit_dir / "body.txt" + + performance_report = dedent( + ( + """ + ### Performance Benchmark Report: {commit_sha} + +
+ Performance shifts + + ``` + {results_shifts} + ``` + +
+ +
+ Full benchmark results + + ``` + {results_full} + ``` + +
+ + Generated by GHA run {gha_run_link} + """ + ) + ) + performance_report = performance_report.format( + commit_sha=commit_sha, + results_shifts=results_shifts, + results_full=results_full, + gha_run_link=gha_run_link, + ) + + if on_pull_request: + # Command to post the report as a comment on the active PR. + body_path.write_text(performance_report) + command = ( + f"gh pr comment {pr_number} " + f"--body-file {body_path.absolute()} " + f"--repo {repo}" + ) + command_path.write_text(command) + + else: + # Command to post the report as new issue. + commit_msg = _subprocess_runner_capture( + f"git log {commit_sha}^! --oneline".split(" ") + ) + # Intended for benchmarking commits on trunk - should include a PR + # number due to our squash policy. + pr_tag_match = re.search("#[0-9]*", commit_msg) + + assignee = "" + pr_tag = "pull request number unavailable" + if pr_tag_match is not None: + pr_tag = pr_tag_match.group(0) + + for login_type in ("author", "mergedBy"): + gh_query = f'.["{login_type}"]["login"]' + command = shlex.split( + f"gh pr view {pr_tag[1:]} " + f"--json {login_type} -q '{gh_query}' " + f"--repo {repo}" + ) + login = _subprocess_runner_capture(command) + + command = [ + "curl", + "-s", + f"https://api.github.com/users/{login}", + ] + login_info = _subprocess_runner_capture(command) + is_user = '"type": "User"' in login_info + if is_user: + assignee = login + break + + title = f"Performance Shift(s): `{commit_sha}`" + body = dedent( + ( + f""" + Benchmark comparison has identified performance shifts at: + + * commit {commit_sha} ({pr_tag}). + +

+ Please review the report below and + take corrective/congratulatory action as appropriate + :slightly_smiling_face: +

+ """ + ) + ) + body += performance_report + body_path.write_text(body) + + command = ( + "gh issue create " + f'--title "{title}" ' + f"--body-file {body_path.absolute()} " + '--label "Bot" ' + '--label "Type: Performance" ' + f"--repo {repo}" + ) + if assignee: + command += f" --assignee {assignee}" + command_path.write_text(command) + + +def _gh_post_reports() -> None: + """ + If running under GitHub Actions: post pre-prepared benchmark reports. + + Reports are prepared by :func:`_gh_create_reports`, which must be run + within a separate action to comply with GHA's security limitations. + """ + if "GITHUB_ACTIONS" not in environ: + # Only run when within GHA. + return + + commit_dirs = [x for x in GH_REPORT_DIR.iterdir() if x.is_dir()] + for commit_dir in commit_dirs: + command_path = commit_dir / "command.txt" + command = command_path.read_text() + + # Security: only accept certain commands to run. + assert command.startswith(("gh issue create", "gh pr comment")) + + _subprocess_runner(shlex.split(command)) class _SubParserGenerator(ABC): @@ -168,18 +333,21 @@ def __init__(self, subparsers: ArgumentParser.add_subparsers) -> None: formatter_class=argparse.RawTextHelpFormatter, ) self.add_arguments() - self.subparser.add_argument( - "asv_args", - nargs=argparse.REMAINDER, - help="Any number of arguments to pass down to ASV.", - ) + self.add_asv_arguments() self.subparser.set_defaults(func=self.func) @abstractmethod def add_arguments(self) -> None: - """All self.subparser.add_argument() calls.""" + """All custom self.subparser.add_argument() calls.""" _ = NotImplemented + def add_asv_arguments(self) -> None: + self.subparser.add_argument( + "asv_args", + nargs=argparse.REMAINDER, + help="Any number of arguments to pass down to the ASV benchmark command.", + ) + @staticmethod @abstractmethod def func(args: argparse.Namespace): @@ -197,11 +365,11 @@ class Overnight(_SubParserGenerator): name = "overnight" description = ( "Benchmarks all commits between the input 
**first_commit** to ``HEAD``, " - "comparing each to its parent for performance shifts. If a commit causes " - "shifts, the output is saved to a file:\n" - "``.asv/performance-shifts/``\n\n" + "comparing each to its parent for performance shifts. If running on " + "GitHub Actions: performance shift(s) will be reported in a new issue.\n" "Designed for checking the previous 24 hours' commits, typically in a " - "scheduled script." + "scheduled script.\n" + "Uses `asv run`." ) epilog = ( "e.g. python bm_runner.py overnight a1b23d4\n" @@ -220,16 +388,20 @@ def func(args: argparse.Namespace) -> None: _setup_common() commit_range = f"{args.first_commit}^^.." - asv_command = ASV_HARNESS.format(posargs=commit_range) - _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) - # git rev-list --first-parent is the command ASV uses. - git_command = f"git rev-list --first-parent {commit_range}" - commit_string = _subprocess_run_print( - git_command.split(" "), capture_output=True, text=True - ).stdout - commit_list = commit_string.rstrip().split("\n") - _asv_compare(*reversed(commit_list), overnight_mode=True) + git_command = shlex.split( + f"git rev-list --first-parent {commit_range}" + ) + commit_string = _subprocess_runner_capture(git_command) + commit_list = commit_string.split("\n") + + asv_command = shlex.split(ASV_HARNESS.format(posargs=commit_range)) + try: + _subprocess_runner([*asv_command, *args.asv_args], asv=True) + finally: + # Designed for long running - want to compare/post any valid + # results even if some are broken. + _asv_compare(*reversed(commit_list), overnight_mode=True) class Branch(_SubParserGenerator): @@ -237,11 +409,15 @@ class Branch(_SubParserGenerator): description = ( "Performs the same operations as ``overnight``, but always on two commits " "only - ``HEAD``, and ``HEAD``'s merge-base with the input " - "**base_branch**. Output from this run is never saved to a file. Designed " + "**base_branch**. 
If running on GitHub Actions: HEAD will be GitHub's " + "merge commit and merge-base will be the merge target. Performance " + "comparisons will be posted in a comment on the relevant pull request.\n" + "Designed " "for testing if the active branch's changes cause performance shifts - " "anticipating what would be caught by ``overnight`` once merged.\n\n" "**For maximum accuracy, avoid using the machine that is running this " - "session. Run time could be >1 hour for the full benchmark suite.**" + "session. Run time could be >1 hour for the full benchmark suite.**\n" + "Uses `asv run`." ) epilog = ( "e.g. python bm_runner.py branch upstream/main\n" @@ -259,19 +435,22 @@ def add_arguments(self) -> None: def func(args: argparse.Namespace) -> None: _setup_common() - git_command = f"git merge-base HEAD {args.base_branch}" - merge_base = _subprocess_run_print( - git_command.split(" "), capture_output=True, text=True - ).stdout[:8] + git_command = shlex.split("git rev-parse HEAD") + head_sha = _subprocess_runner_capture(git_command)[:8] + + git_command = shlex.split( + f"git merge-base {head_sha} {args.base_branch}" + ) + merge_base = _subprocess_runner_capture(git_command)[:8] with NamedTemporaryFile("w") as hashfile: - hashfile.writelines([merge_base, "\n", "HEAD"]) + hashfile.writelines([merge_base, "\n", head_sha]) hashfile.flush() commit_range = f"HASHFILE:{hashfile.name}" - asv_command = ASV_HARNESS.format(posargs=commit_range) - _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) + asv_command = shlex.split(ASV_HARNESS.format(posargs=commit_range)) + _subprocess_runner([*asv_command, *args.asv_args], asv=True) - _asv_compare(merge_base, "HEAD") + _asv_compare(merge_base, head_sha) class _CSPerf(_SubParserGenerator, ABC): @@ -281,7 +460,8 @@ class _CSPerf(_SubParserGenerator, ABC): "Run the on-demand {} suite of benchmarks (part of the UK Met " "Office NG-VAT project) for the ``HEAD`` of ``upstream/main`` only, " "and publish the results to the input 
**publish_dir**, within a " - "unique subdirectory for this run." + "unique subdirectory for this run.\n" + "Uses `asv run`." ) epilog = ( "e.g. python bm_runner.py {0} my_publish_dir\n" @@ -325,15 +505,19 @@ def csperf( # Don't fail the whole run if memory blows on 1 benchmark. asv_command = asv_command.replace(" --strict", "") # Only do a single round. - asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command) - _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) + asv_command = shlex.split( + re.sub(r"rounds=\d", "rounds=1", asv_command) + ) + _subprocess_runner([*asv_command, *args.asv_args], asv=True) - asv_command = f"publish {commit_range} --html-dir={publish_subdir}" - _subprocess_run_asv(asv_command.split(" ")) + asv_command = shlex.split( + f"publish {commit_range} --html-dir={publish_subdir}" + ) + _subprocess_runner(asv_command, asv=True) # Print completion message. location = BENCHMARKS_DIR / ".asv" - print( + echo( f'New ASV results for "{run_type}".\n' f'See "{publish_subdir}",' f'\n or JSON files under "{location / "results"}".' @@ -380,7 +564,29 @@ def add_arguments(self) -> None: @staticmethod def func(args: argparse.Namespace) -> None: _setup_common() - _subprocess_run_asv([args.asv_sub_command, *args.asv_args]) + _subprocess_runner([args.asv_sub_command, *args.asv_args], asv=True) + + +class GhPost(_SubParserGenerator): + name = "_gh_post" + description = ( + "Used by GitHub Actions to post benchmark reports that were prepared " + "during previous actions. Separated to comply with GitHub's security " + "requirements." 
+ ) + epilog = "Sole acceptable syntax: python bm_runner.py _gh_post" + + @staticmethod + def func(args: argparse.Namespace) -> None: + _gh_post_reports() + + # No arguments permitted for this subclass: + + def add_arguments(self) -> None: + pass + + def add_asv_arguments(self) -> None: + pass def main(): @@ -390,7 +596,7 @@ def main(): ) subparsers = parser.add_subparsers(required=True) - for gen in (Overnight, Branch, CPerf, SPerf, Custom): + for gen in (Overnight, Branch, CPerf, SPerf, Custom, GhPost): _ = gen(subparsers).subparser parsed = parser.parse_args() diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 72b74bdf42..35e2158141 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -72,8 +72,11 @@ This document explains the changes made to Iris for this release 💼 Internal =========== -#. N/A - +#. `@trexfeathers`_ and `@ESadek-MO`_ (reviewer) performed a suite of fixes and + improvements for benchmarking, primarily to get + :ref:`on demand pull request benchmarking ` + working properly. (Main pull request: :pull:`5437`, more detail: + :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) .. 
comment From 687f4a7af0d21d7a66febda475fbb4884a556944 Mon Sep 17 00:00:00 2001 From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com> Date: Mon, 21 Aug 2023 16:35:10 +0530 Subject: [PATCH 036/134] =?UTF-8?q?Do=20not=20run=20CI=20stale=20bot=20on?= =?UTF-8?q?=20`Dragon=20=F0=9F=90=89`=20labelled=20issues=20and=20PRs=20(#?= =?UTF-8?q?5425)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/stale.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 203dc43b4e..67b0515e8b 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -33,7 +33,7 @@ jobs: Otherwise this issue will be automatically closed in 28 days time. # Comment on the staled prs. - stale-pr-message: | + stale-pr-message: | In order to maintain a backlog of relevant PRs, we automatically label them as stale after 500 days of inactivity. If this PR is still important to you, then please comment on this PR and the stale label will be removed. @@ -43,7 +43,7 @@ jobs: # Comment on the staled issues while closed. close-issue-message: | This stale issue has been automatically closed due to a lack of community activity. - + If you still care about this issue, then please either: * Re-open this issue, if you have sufficient permissions, or * Add a comment pinging `@SciTools/iris-devs` who will re-open on your behalf. @@ -51,12 +51,12 @@ jobs: # Comment on the staled prs while closed. close-pr-message: | This stale PR has been automatically closed due to a lack of community activity. - + If you still care about this PR, then please either: * Re-open this PR, if you have sufficient permissions, or * Add a comment pinging `@SciTools/iris-devs` who will re-open on your behalf. - # Label to apply on staled issues. + # Label to apply on staled issues. stale-issue-label: Stale # Label to apply on staled prs. 
@@ -64,11 +64,11 @@ jobs: # Labels on issues exempted from stale. exempt-issue-labels: - "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" + "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎" # Labels on prs exempted from stale. exempt-pr-labels: - "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" + "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎" # Max number of operations per run. operations-per-run: 300 From dc4fac6cb710a0004922c9a516888f1f054c36b0 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 11:35:12 +0100 Subject: [PATCH 037/134] Updated environment lockfiles (#5439) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 34 +++++++++++++------------- requirements/locks/py311-linux-64.lock | 34 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 32 ++++++++++++------------ 3 files changed, 50 insertions(+), 50 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 9c5ea32d8e..5dbb9d5824 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -66,7 +66,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 
+https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 @@ -109,7 +109,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar. https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 @@ -131,7 +131,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py310hd8f1fb https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 
-https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb @@ -139,7 +139,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py310hc6cd4ac_0.con https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d @@ -175,10 +175,10 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 
https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py310h2372a71_0.conda#75e60ce53c01a121039b3050c9e1f759 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -188,7 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py310h2372a71_0.conda#1c510e74c87dc9b8fe1f7f9e8dbcef96 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_0.conda#dfb49d3ac440e1a236080f9c300e642f https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 
https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d @@ -207,7 +207,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py310hd41b1e2_0.conda#684399f9ddc0b9d6f3b6164f6107098e -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py310h2372a71_0.conda#13df1c4ea94f2e3326b15da1999e5999 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.0-py310h2372a71_0.conda#c0967a89cb3c2b8f493b5990dcd7c26d https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py310h2372a71_0.conda#f939fe2998c888a77b310926a6c666f3 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 @@ -249,16 +249,16 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.1-pyhd8ed1ab_0.conda#cda1fad481e5a7d5489c0cc44d099b9d https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.3-pyhd8ed1ab_0.conda#e5abd7f3cb1050de9bce3027d8ffb2e7 https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.1-pyhd8ed1ab_0.conda#a9709f3c314d77cf4730b7806790f4bd https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 @@ -277,10 +277,10 @@ https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#406 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b 
https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.8-pyhd8ed1ab_0.conda#1e6eb6f55c967ed84a6c87306e7a9411 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 42e5224fe1..c2cd474acc 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -66,7 +66,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 @@ -109,7 +109,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar. 
https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 @@ -131,7 +131,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py311ha362b7 https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb @@ -139,7 +139,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py311hb755f60_0.con https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d @@ -175,10 +175,10 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py311h459d7ec_0.conda#87b306459b81b7a7aaad37222d537a4f https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 
-https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -188,7 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.conda#12b1c374ee90a1aa11ea921858394dc8 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_0.conda#7d9a31416c18704f55946ff7cf8da5dc https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 @@ -206,7 +206,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b 
https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py311h9547e67_0.conda#daf3f23397ab2265d0cdfa339f3627ba -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py311h459d7ec_0.conda#3c2c65575c28b23afc5e4ff721a2fc9f +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.0-py311h459d7ec_0.conda#1b0db1a905b509db652609560ae9a2d5 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py311h459d7ec_0.conda#8c1ac2c00995248898220c4c1a9d81ab https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 @@ -248,16 +248,16 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.1-pyhd8ed1ab_0.conda#cda1fad481e5a7d5489c0cc44d099b9d https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.3-pyhd8ed1ab_0.conda#e5abd7f3cb1050de9bce3027d8ffb2e7 https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.1-pyhd8ed1ab_0.conda#a9709f3c314d77cf4730b7806790f4bd https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 @@ -276,10 +276,10 @@ https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#406 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.8-pyhd8ed1ab_0.conda#1e6eb6f55c967ed84a6c87306e7a9411 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index e598fba992..ffd9d01cf5 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -66,7 +66,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 
-https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 @@ -109,7 +109,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar. https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 @@ -131,7 +131,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py39h5a03fae https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c 
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb @@ -139,7 +139,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py39h3d6467e_0.cond https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d @@ -175,10 +175,10 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py39hd1e30aa_0.conda#41841cc1d7387bb7a30cdde4d88afbf4 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -188,7 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py39hd1e30aa_0.conda#da334eecb1ea2248e28294c49e6f6d89 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_0.conda#ee7f18d58a96b04fdbd2e55f7694ae0d 
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d @@ -247,17 +247,17 @@ https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.1-pyhd8ed1ab_0.conda#cda1fad481e5a7d5489c0cc44d099b9d https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py39h0126182_0.conda#61cee808ff7830fcceeb4f336cc738b1 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.3-pyhd8ed1ab_0.conda#e5abd7f3cb1050de9bce3027d8ffb2e7 
https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.1-pyhd8ed1ab_0.conda#a9709f3c314d77cf4730b7806790f4bd https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 @@ -277,10 +277,10 @@ https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#406 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.8-pyhd8ed1ab_0.conda#1e6eb6f55c967ed84a6c87306e7a9411 From 0ba95baa07dc70030171c3a769c68d6abd261774 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 11:35:52 +0100 Subject: [PATCH 038/134] [pre-commit.ci] pre-commit autoupdate (#5440) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/blacken-docs: 1.15.0 → 1.16.0](https://github.com/asottile/blacken-docs/compare/1.15.0...1.16.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c641389768..f89f9f487e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,7 +56,7 @@ repos: args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: 1.15.0 + rev: 1.16.0 hooks: - id: blacken-docs types: [file, rst] From 
50ee726bf15fc44a8e6ad2da7fb981c6c08528f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 11:36:22 +0100 Subject: [PATCH 039/134] Bump actions/setup-python from 3 to 4 (#5443) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 3 to 4. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/benchmarks_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index cffa1b1ef4..365fc733a5 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -75,7 +75,7 @@ jobs: - name: Set up Python # benchmarks/bm_runner.py only needs builtins to run. 
- uses: actions/setup-python@v3 + uses: actions/setup-python@v4 - name: Post reports env: From c4dcfd37b7868d41fe0256e842405f3913ff6e9c Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Tue, 22 Aug 2023 16:54:52 +0100 Subject: [PATCH 040/134] DOC: remove advice to delete main branch (#5335) --- .../developers_guide/gitwash/development_workflow.rst | 10 +--------- docs/src/developers_guide/gitwash/git_links.inc | 1 - 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/docs/src/developers_guide/gitwash/development_workflow.rst b/docs/src/developers_guide/gitwash/development_workflow.rst index b086922d5b..b38ddd6e90 100644 --- a/docs/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/src/developers_guide/gitwash/development_workflow.rst @@ -14,8 +14,7 @@ Workflow Summary In what follows we'll refer to the upstream iris ``main`` branch, as "trunk". -* Don't use your ``main`` (that is on your fork) branch for anything. - Consider deleting it. +* Don't use your ``main`` (that is on your fork) branch for development. * When you are starting a new set of changes, fetch any changes from trunk, and start a new *feature branch* from that. * Make a new branch for each separable set of changes |emdash| "one task, one @@ -34,13 +33,6 @@ what you've done, and why you did it. See `linux git workflow`_ for some explanation. -Consider Deleting Your Main Branch -================================== - -It may sound strange, but deleting your own ``main`` branch can help reduce -confusion about which branch you are on. See `deleting master on github`_ for -details. - .. _update-mirror-trunk: Update the Mirror of Trunk diff --git a/docs/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc index 11d037ccf4..42bd556c3d 100644 --- a/docs/src/developers_guide/gitwash/git_links.inc +++ b/docs/src/developers_guide/gitwash/git_links.inc @@ -28,6 +28,5 @@ .. 
_git config: http://schacon.github.com/git/git-config.html .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html -.. _deleting master on github: https://matthew-brett.github.io/pydagogue/gh_delete_master.html .. |emdash| unicode:: U+02014 From b7c3f682fd484b67334fc694c17e0a80cc2233f8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 31 Aug 2023 13:54:17 +0100 Subject: [PATCH 041/134] Whats new updates for v3.7.0 . (#5451) * Whats new updates for v3.7.0 . * Improve whatsnew and standardise its formatting. --- docs/src/whatsnew/3.7.rst | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index d5ea21f3d7..71ce4da735 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -1,7 +1,7 @@ .. include:: ../common_links.inc -v3.7 (16 Aug 2023) [release candidate] -************************************** +v3.7 (31 Aug 2023) +****************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -18,18 +18,19 @@ This document explains the changes made to Iris for this release we have made a number of improvements for user-experience and usability, notably : - * improved messaging for :ref:`CubeList.concatenate() ` - and :ref:`Cube.convert_units() `. + * We added :ref:`Dark mode support ` for the documentation. - * avoid warnings which may occur in :ref:`pp loading ` - and :ref:`contourf `. + * We :ref:`added a "Dask Best Practices" guide ` + ( :ref:`here ` ) . - * :ref:`documentation supports Dark mode `. + * We :ref:`improved the Installation Guide `. - * :ref:`added a "Dask Best Practices" guide ` - ( :ref:`here ` ) . + * We improved the information in + :ref:`warnings from CubeList.concatenate() ` + and :ref:`documentation of Cube.convert_units() `. - * :ref:`improved the Installation Guide `. 
+ * We prevented some warnings occurring in :ref:`pp loading ` + and :ref:`contourf `. Please do get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! From 4892d78d421cffe8cb7c4b3dd75fcb6f91193c5b Mon Sep 17 00:00:00 2001 From: scottrobinson02 <113097180+scottrobinson02@users.noreply.github.com> Date: Fri, 1 Sep 2023 16:06:31 +0100 Subject: [PATCH 042/134] Units when dividing a coordinate by a cube (#5331) * Commit * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: scott.robinson Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- lib/iris/cube.py | 1 + lib/iris/tests/unit/cube/test_Cube.py | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 35e3a903c6..aec80dce47 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3722,6 +3722,7 @@ def __idiv__(self, other): def __rdiv__(self, other): data = 1 / self.core_data() reciprocal = self.copy(data=data) + reciprocal.units = reciprocal.units**-1 return iris.analysis.maths.multiply(reciprocal, other) __truediv__ = __div__ diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 8084ab31fa..a733665df8 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -2754,6 +2754,13 @@ def test_bad_coord(self): _ = self.cube.coord(bad_coord) +class Test_coord_division_units(tests.IrisTest): + def test(self): + aux = AuxCoord(1, long_name="length", units="metres") + cube = Cube(1, units="seconds") + self.assertEqual((aux / cube).units, "m.s-1") + + class Test__getitem_CellMeasure(tests.IrisTest): def setUp(self): cube = Cube(np.arange(6).reshape(2, 3)) From 897d4dc60d9e066dca2d17213968dcca2a0d2008 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Mon, 4 Sep 2023 14:16:56 +0100 Subject: 
[PATCH 043/134] whatsnew for #5331 (#5458) --- docs/src/whatsnew/latest.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 35e2158141..2de3dc1ced 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -36,7 +36,8 @@ This document explains the changes made to Iris for this release 🐛 Bugs Fixed ============= -#. N/A +#. `@scottrobinson02`_ fixed the output units when dividing a coordinate by a + cube. (:issue:`5305`, :pull:`5331`) 💣 Incompatible Changes @@ -83,6 +84,7 @@ This document explains the changes made to Iris for this release Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: +.. _@scottrobinson02: https://github.com/scottrobinson02 .. comment Whatsnew resources in alphabetical order: From f162be86fe22211f0d80f9be9cfc916d49bfb3bc Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 09:43:21 +0100 Subject: [PATCH 044/134] Updated environment lockfiles (#5448) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 104 +++++++++++------------- requirements/locks/py311-linux-64.lock | 106 +++++++++++-------------- requirements/locks/py39-linux-64.lock | 106 +++++++++++-------------- 3 files changed, 137 insertions(+), 179 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 5dbb9d5824..ed387a25a8 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -19,13 +19,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda# https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab 
https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.9-hd590300_0.conda#a0c6f0e7e1a467f5678f94dea18c8aa7 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 @@ -34,7 +33,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a 
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -69,15 +68,11 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 -https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e 
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d @@ -85,36 +80,34 @@ https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#e https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf 
+https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_0.conda#d597567092897b1f1c7350f32e03944e https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a +https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e -https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 @@ -124,10 +117,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.con https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py310hd8f1fbe_9.conda#e2047ad2af52c01845f58b580c6cbd5c +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_0.conda#fb6201eb1daa3a3a2f91a4833bdf27c7 https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 @@ -135,13 +126,13 @@ https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.cond https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py310hc6cd4ac_0.conda#b903ef2ce154e97f621fe30d999227ad +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py310hc6cd4ac_0.conda#d1157aba60e67df614438afd5cd53564 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.3-pyhd8ed1ab_0.conda#3104cf0ab9fb9de393051bf92b10dbe9 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b @@ -151,12 +142,12 @@ https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#3427 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 
-https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_0.conda#741385a84f6a1b6623eb39226cc669e8 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda#96aec6156d58591f5a4e67056521ce1b +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -166,7 +157,7 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py310ha4c1d20_0.conda#188e72aa313da668464e35309e9a32b0 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea 
https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff @@ -177,7 +168,7 @@ https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py310h2372a71_0.conda#75e60ce53c01a121039b3050c9e1f759 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_0.conda#511120451bf728d52bb37c73d4069e57 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e @@ -191,7 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_0.conda#dfb49d3ac440e1a236080f9c300e642f 
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -199,8 +190,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc @@ -209,12 +198,12 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar 
https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py310hd41b1e2_0.conda#684399f9ddc0b9d6f3b6164f6107098e https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.0-py310h2372a71_0.conda#c0967a89cb3c2b8f493b5990dcd7c26d https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py310h2372a71_0.conda#f939fe2998c888a77b310926a6c666f3 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py310h2372a71_0.conda#1f18231ffab82f236ce074b2aaa07e54 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a +https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 @@ -227,17 +216,17 @@ https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_ https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db 
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h056c13c_1.conda#32d925cfd330e0cbb72b7618558a44e8 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h7dcad9a_2.conda#a46061c83ed37bfa05d1ee96ec2fbb08 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_0.conda#be1a7e420b7bac4ee02353d0e3161918 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_0.conda#2f45c1da3828ec2dc44d84b68916e3e7 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py310hf38f957_0.conda#9b55c9041c5a7f80f184a2cb05ec9663 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py310h7cbd5c2_1.conda#11e0099d4571b4974c04386e4ce679ed 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py310hcc13569_0.conda#6c92da4ec4e301d09a365c0584e632c8 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 @@ -247,33 +236,30 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.1-pyhd8ed1ab_0.conda#cda1fad481e5a7d5489c0cc44d099b9d -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.0-pyhd8ed1ab_0.conda#7bcadb3fcbb9fadeb53abef5b53b1f98 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-hf7dbed1_0.conda#ad8e8068208846032d6e9ce73d406cee +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.27-pyhd8ed1ab_0.conda#6fbde8d3bdd1874132a1b26a3554b22c https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 
+https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_102.conda#6025039727a049ab4c0f2aab842c01cb https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.3-pyhd8ed1ab_0.conda#e5abd7f3cb1050de9bce3027d8ffb2e7 -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.1-pyhd8ed1ab_0.conda#a9709f3c314d77cf4730b7806790f4bd +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.0-pyhd8ed1ab_0.conda#e72cf2f28bd6288a790c13592648f265 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 
-https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py310ha4c1d20_0.conda#300d3b434872eb84965864f0fcc5b5da +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h7fe3ca9_15.conda#f09d307dd78e61e4eb2c6c2f81056d0e +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310ha4c1d20_0.conda#d1ec73b85cb90900c40c7fbcd36059e8 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_4.conda#db878a0696f9a7980171fd3cf29cca22 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py310hff52083_0.conda#7e454b4a61754714a4a4d183641374da -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310h6f5dce6_101.conda#0d50bea104512f2728676a8bff8840d3 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 @@ -283,4 +269,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8e 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.8-pyhd8ed1ab_0.conda#1e6eb6f55c967ed84a6c87306e7a9411 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index c2cd474acc..7542d2bff0 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -19,13 +19,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda# https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.9-hd590300_0.conda#a0c6f0e7e1a467f5678f94dea18c8aa7 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 
https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 @@ -34,7 +33,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -69,15 +68,11 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 -https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d @@ -85,36 +80,34 @@ https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#e 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_0.conda#d597567092897b1f1c7350f32e03944e https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 
https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a +https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 
-https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e -https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.4-hab00c5b_0_cpython.conda#1c628861a2a126b9fc9363ca1b7d014e -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.5-hab00c5b_0_cpython.conda#f0288cb82594b1cbc71111d1cd3c5422 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 @@ -124,10 +117,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.con https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a 
-https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py311ha362b79_9.conda#ced5340f5dc6cff43a80deac8d0e398f +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_0.conda#b8128d083dbf6abd472b1a3e98b0b83d https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 @@ -135,13 +126,13 @@ https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.cond https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py311hb755f60_0.conda#257dfede48699e2e6372528d08399e5a +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py311hb755f60_0.conda#81d4eacf7eb2d40beee33aa71e8f94ad https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 
https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.3-pyhd8ed1ab_0.conda#3104cf0ab9fb9de393051bf92b10dbe9 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b @@ -151,12 +142,12 @@ https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#3427 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_0.conda#f53903649188b99e6b44c560c69f5b23 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 
-https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda#96aec6156d58591f5a4e67056521ce1b +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -166,7 +157,7 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py311h64a7726_0.conda#71fd6f1734a0fa64d8f852ae7156ec45 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff @@ -177,7 +168,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py311h459d7ec_0.conda#87b306459b81b7a7aaad37222d537a4f https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_0.conda#30eaaf31141e785a445bf1ede6235fe3 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e @@ -190,7 +181,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_0.conda#7d9a31416c18704f55946ff7cf8da5dc https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -198,8 +189,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc @@ -208,12 +197,12 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py311h9547e67_0.conda#daf3f23397ab2265d0cdfa339f3627ba https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.0-py311h459d7ec_0.conda#1b0db1a905b509db652609560ae9a2d5 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py311h459d7ec_0.conda#8c1ac2c00995248898220c4c1a9d81ab +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py311h459d7ec_0.conda#fc327c0ea015db3b6484eabb37d44e60 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 
-https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a +https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 @@ -226,17 +215,17 @@ https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_ https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311he06c224_2.conda#10a1953d2f74d292b5de093ceea104b2 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_0.conda#17d25ab64a32872b349579fdb07bbdb2 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 
https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_0.conda#2f45c1da3828ec2dc44d84b68916e3e7 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py311h54ef318_0.conda#2631a9e423855fb586c05f8a5ee8b177 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py311h320fe9a_1.conda#5f92f46bd33917832a99d1660b4075ac +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py311h320fe9a_0.conda#7f35501e126df510b250ad893482ef45 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c @@ -246,33 +235,30 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.1-pyhd8ed1ab_0.conda#cda1fad481e5a7d5489c0cc44d099b9d -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.0-pyhd8ed1ab_0.conda#7bcadb3fcbb9fadeb53abef5b53b1f98 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-hf7dbed1_0.conda#ad8e8068208846032d6e9ce73d406cee +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.27-pyhd8ed1ab_0.conda#6fbde8d3bdd1874132a1b26a3554b22c https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_102.conda#b48083ba918347f30efa94f7dc694919 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.3-pyhd8ed1ab_0.conda#e5abd7f3cb1050de9bce3027d8ffb2e7 
-https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.1-pyhd8ed1ab_0.conda#a9709f3c314d77cf4730b7806790f4bd +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.0-pyhd8ed1ab_0.conda#e72cf2f28bd6288a790c13592648f265 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py311h64a7726_0.conda#356da36102fc1eeb8a81e6d79e53bc7e +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h7fe3ca9_15.conda#f09d307dd78e61e4eb2c6c2f81056d0e +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py311h64a7726_0.conda#18d094fb8e4ac52f93a4f4857a8f1e8f +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f 
https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_4.conda#afe5363b88d2e97266063558a6599bd0 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py311h38be061_0.conda#c056ffab165096669389e5a4eea4dc4d -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h9a7c333_101.conda#1dc70c7c3352c0ff1f861d866860db37 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 @@ -282,4 +268,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8e https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.8-pyhd8ed1ab_0.conda#1e6eb6f55c967ed84a6c87306e7a9411 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index ffd9d01cf5..24a1b7cc90 100644 --- a/requirements/locks/py39-linux-64.lock +++ 
b/requirements/locks/py39-linux-64.lock @@ -19,13 +19,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda# https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.9-hd590300_0.conda#a0c6f0e7e1a467f5678f94dea18c8aa7 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 @@ -34,7 +33,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz 
https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -69,15 +68,11 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 -https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d @@ -85,36 +80,34 @@ https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#e https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_0.conda#d597567092897b1f1c7350f32e03944e https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 
https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a +https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e -https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 -https://conda.anaconda.org/conda-forge/linux-64/python-3.9.17-h0755675_0_cpython.conda#384886ac3580bba3541ce65c992eb192 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 
+https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 @@ -124,10 +117,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.con https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py39h5a03fae_9.conda#d1601752c6f47af7bedf838be3d8ca6b +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_0.conda#8a1b6b1f5e230aaf6408d6b0aef3492f https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c 
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 @@ -135,13 +126,13 @@ https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.cond https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py39h3d6467e_0.conda#3d700ccea39ca04cb8b6210ac653e0b1 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py39h3d6467e_0.conda#f90bb794d0f7463fbe28596796aa0100 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.3-pyhd8ed1ab_0.conda#3104cf0ab9fb9de393051bf92b10dbe9 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b @@ -151,12 +142,12 @@ 
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#3427 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_0.conda#3822b0ae733e022c10469c0e46bdddc4 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda#96aec6156d58591f5a4e67056521ce1b +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -166,7 +157,7 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py39h6183b62_0.conda#f1c358d06344bd7f9a293f9af4b9b8fc https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff @@ -177,7 +168,7 @@ https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py39hd1e30aa_0.conda#41841cc1d7387bb7a30cdde4d88afbf4 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_0.conda#ccecb3196b3678e9b5fc8441d681c203 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e @@ -191,7 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_0.conda#ee7f18d58a96b04fdbd2e55f7694ae0d https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -199,8 +190,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b 
https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc @@ -208,13 +197,13 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py39h7633fee_0.conda#54e6f32e448fdc273606011f0940d076 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_0.conda#434246edfc30e20c0847d4c2caff0a53 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py39hd1e30aa_0.conda#03e44d84ea9dd2432a633407401e5688 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py39hd1e30aa_0.conda#de06dc7edaddbd3b60c050f3a95d6fe6 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.0.1-pyhd8ed1ab_0.conda#d978c61aa5fc2c69380d53ad56b5ae86 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a +https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 
https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 @@ -227,17 +216,17 @@ https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_ https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hf1c3bca_1.conda#ae6bfe65e81d9b59a71cc01a2858650f +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_2.conda#d79ed0ee1738151284ebd97092a6a210 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_0.conda#4eaef850715aff114e2126a2f1a7b1f0 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_0.conda#2f45c1da3828ec2dc44d84b68916e3e7 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df 
https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.0.1-pyhd8ed1ab_0.conda#54661981fd331e20847d8a49543dd9af https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py39h40cae4c_1.conda#cfe677f02e507f76d6767379e4ff09a9 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py39hddac248_0.conda#0a3624f600f51df010a274176e356ac5 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb @@ -246,34 +235,31 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.1-pyhd8ed1ab_0.conda#cda1fad481e5a7d5489c0cc44d099b9d -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.0-pyhd8ed1ab_0.conda#7bcadb3fcbb9fadeb53abef5b53b1f98 
+https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-hf7dbed1_0.conda#ad8e8068208846032d6e9ce73d406cee +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.27-pyhd8ed1ab_0.conda#6fbde8d3bdd1874132a1b26a3554b22c https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py39h0126182_0.conda#61cee808ff7830fcceeb4f336cc738b1 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_102.conda#05390bd5ad0ddc2f719392d087673344 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.3-pyhd8ed1ab_0.conda#e5abd7f3cb1050de9bce3027d8ffb2e7 -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.1-pyhd8ed1ab_0.conda#a9709f3c314d77cf4730b7806790f4bd +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.0-pyhd8ed1ab_0.conda#e72cf2f28bd6288a790c13592648f265 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py39h6183b62_0.conda#81212684c03e970520656f1a62ab9d39 +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h7fe3ca9_15.conda#f09d307dd78e61e4eb2c6c2f81056d0e +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h6183b62_0.conda#c7074f28bd86170a8235ddc995b4ee57 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_4.conda#e12391692d70732bf1df08b7ecf40095 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py39hf3d152e_0.conda#6ce223b8b14df8bdfa72ac2a10c2fad3 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4218a78_101.conda#8f5c25bb7accd1954d8b7fc689c5975c -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d 
-https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 @@ -283,4 +269,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8e https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.8-pyhd8ed1ab_0.conda#1e6eb6f55c967ed84a6c87306e7a9411 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 From c526ce0e5e9f1715fc81c29c9526bfda0a78deca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 14:34:25 +0100 Subject: [PATCH 045/134] Bump scitools/workflows from 2023.06.0 to 2023.09.0 (#5471) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2023.06.0 to 2023.09.0. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2023.06.0...2023.09.0) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index c42eb90104..548cc3be1d 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.06.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.09.0 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 453014fa2a..c800381316 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.06.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.09.0 secrets: inherit From 0155124906d178cced088780ecbb39dd257e6993 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:16:26 +0100 Subject: [PATCH 046/134] [pre-commit.ci] pre-commit autoupdate (#5489) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.7.0 → 23.9.1](https://github.com/psf/black/compare/23.7.0...23.9.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f89f9f487e..32c51d35f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/psf/black - rev: 
23.7.0 + rev: 23.9.1 hooks: - id: black pass_filenames: false From 228049053ab6b54c8313d95fb9889a484d4dca65 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:17:55 +0100 Subject: [PATCH 047/134] Bump scitools/workflows from 2023.09.0 to 2023.09.1 (#5490) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2023.09.0 to 2023.09.1. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2023.09.0...2023.09.1) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 548cc3be1d..391f944310 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.09.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.09.1 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index c800381316..a3f0c7f05f 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.09.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.09.1 secrets: inherit From 8b336e976d1d3b54c215fad49e9e744eab71ba0a Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Wed, 13 Sep 2023 13:13:29 +0100 Subject: [PATCH 048/134] No longer use 
ASV --strict. (#5496) --- benchmarks/bm_runner.py | 16 ++++++++++------ docs/src/whatsnew/latest.rst | 3 +++ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index b3145fbdf1..b0f98c04ac 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -32,8 +32,7 @@ # Common ASV arguments for all run_types except `custom`. ASV_HARNESS = ( - "run {posargs} --attribute rounds=4 --interleave-rounds --strict " - "--show-stderr" + "run {posargs} --attribute rounds=4 --interleave-rounds --show-stderr" ) @@ -501,14 +500,19 @@ def csperf( asv_command = ( ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" ) - # C/SPerf benchmarks are much bigger than the CI ones: - # Don't fail the whole run if memory blows on 1 benchmark. - asv_command = asv_command.replace(" --strict", "") + # Only do a single round. asv_command = shlex.split( re.sub(r"rounds=\d", "rounds=1", asv_command) ) - _subprocess_runner([*asv_command, *args.asv_args], asv=True) + try: + _subprocess_runner([*asv_command, *args.asv_args], asv=True) + except subprocess.CalledProcessError as err: + # C/SPerf benchmarks are much bigger than the CI ones: + # Don't fail the whole run if memory blows on 1 benchmark. + # ASV produces return code of 2 if the run includes crashes. + if err.returncode != 2: + raise asv_command = shlex.split( f"publish {commit_range} --html-dir={publish_subdir}" diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 2de3dc1ced..66a48f7b46 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -79,6 +79,9 @@ This document explains the changes made to Iris for this release working properly. (Main pull request: :pull:`5437`, more detail: :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) +#. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no + longer using the ``--strict`` argument. (:pull:`5496`) + .. 
comment Whatsnew author names (@github name) in alphabetical order. Note that, From c53481ea5357c556a1927a637d07da380be8a69d Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Thu, 14 Sep 2023 10:10:32 +0100 Subject: [PATCH 049/134] Remove uses of logging.warn (#5488) * Align UGRID CF warnings with other CF warnings. * Replace remaining uses of logging warnings. * Assert for UserWarnings * Better test variable names. * What's New entry. --------- Co-authored-by: Henry Wright <84939917+HGWright@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 4 ++ lib/iris/experimental/ugrid/cf.py | 54 +++++-------------- lib/iris/experimental/ugrid/load.py | 10 ++-- .../experimental/test_ugrid_load.py | 19 +++---- ...test_CFUGridAuxiliaryCoordinateVariable.py | 51 ++++++++++-------- .../cf/test_CFUGridConnectivityVariable.py | 48 ++++++++++------- .../ugrid/cf/test_CFUGridMeshVariable.py | 42 +++++++++------ 7 files changed, 113 insertions(+), 115 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 66a48f7b46..8391f2ced6 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -79,10 +79,14 @@ This document explains the changes made to Iris for this release working properly. (Main pull request: :pull:`5437`, more detail: :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) +#. `@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in + favour of using Python warnings, following team agreement. (:pull:`5488`) + #. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no longer using the ``--strict`` argument. (:pull:`5496`) + .. comment Whatsnew author names (@github name) in alphabetical order. 
Note that, core dev names are automatically included by the common_links.inc: diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index 2d8d6cc448..86b76c7a75 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -10,15 +10,11 @@ Eventual destination: :mod:`iris.fileformats.cf`. """ -import logging +import warnings -from ...config import get_logger from ...fileformats import cf from .mesh import Connectivity -# Configure the logger. -logger = get_logger(__name__, propagate=True, handler=False) - class CFUGridConnectivityVariable(cf.CFVariable): """ @@ -50,8 +46,6 @@ class CFUGridConnectivityVariable(cf.CFVariable): def identify(cls, variables, ignore=None, target=None, warn=True): result = {} ignore, target = cls._identify_common(variables, ignore, target) - # TODO: reconsider logging level when we have consistent practice. - log_level = logging.WARNING if warn else logging.DEBUG # Identify all CF-UGRID connectivity variables. for nc_var_name, nc_var in target.items(): @@ -70,11 +64,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"{name}, referenced by netCDF variable " f"{nc_var_name}" ) - logger.log( - level=log_level, - msg=message, - extra=dict(cls=cls.__name__), - ) + if warn: + warnings.warn(message) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -88,11 +79,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"as a CF-UGRID connectivity - is a " f"CF-netCDF label variable." ) - logger.log( - level=log_level, - msg=message, - extra=dict(cls=cls.__name__), - ) + if warn: + warnings.warn(message) return result @@ -131,8 +119,6 @@ class CFUGridAuxiliaryCoordinateVariable(cf.CFVariable): def identify(cls, variables, ignore=None, target=None, warn=True): result = {} ignore, target = cls._identify_common(variables, ignore, target) - # TODO: reconsider logging level when we have consistent practice. 
- log_level = logging.WARNING if warn else logging.DEBUG # Identify any CF-UGRID-relevant auxiliary coordinate variables. for nc_var_name, nc_var in target.items(): @@ -149,11 +135,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"variable {name}, referenced by netCDF " f"variable {nc_var_name}" ) - logger.log( - level=log_level, - msg=message, - extra=dict(cls=cls.__name__), - ) + if warn: + warnings.warn(message) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -170,11 +153,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"auxiliary coordinate - is a " f"CF-netCDF label variable." ) - logger.log( - level=log_level, - msg=message, - extra=dict(cls=cls.__name__), - ) + if warn: + warnings.warn(message) return result @@ -205,8 +185,6 @@ class CFUGridMeshVariable(cf.CFVariable): def identify(cls, variables, ignore=None, target=None, warn=True): result = {} ignore, target = cls._identify_common(variables, ignore, target) - # TODO: reconsider logging level when we have consistent practice. - log_level = logging.WARNING if warn else logging.DEBUG # Identify all CF-UGRID mesh variables. all_vars = target == variables @@ -232,11 +210,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"Missing CF-UGRID mesh variable {name}, " f"referenced by netCDF variable {nc_var_name}" ) - logger.log( - level=log_level, - msg=message, - extra=dict(cls=cls.__name__), - ) + if warn: + warnings.warn(message) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -250,11 +225,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"CF-UGRID mesh - is a CF-netCDF label " f"variable." 
) - logger.log( - level=log_level, - msg=message, - extra=dict(cls=cls.__name__), - ) + if warn: + warnings.warn(message) return result diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index cfa3935991..d2670ac690 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -15,6 +15,7 @@ from itertools import groupby from pathlib import Path import threading +import warnings from ...config import get_logger from ...coords import AuxCoord @@ -350,8 +351,7 @@ def _build_mesh(cf, mesh_var, file_path): ) if cf_role_message: cf_role_message += " Correcting to 'mesh_topology'." - # TODO: reconsider logging level when we have consistent practice. - logger.warning(cf_role_message, extra=dict(cls=None)) + warnings.warn(cf_role_message) if hasattr(mesh_var, "volume_node_connectivity"): topology_dimension = 3 @@ -369,8 +369,7 @@ def _build_mesh(cf, mesh_var, file_path): f" : *Assuming* topology_dimension={topology_dimension}" ", consistent with the attached connectivities." ) - # TODO: reconsider logging level when we have consistent practice. - logger.warning(msg, extra=dict(cls=None)) + warnings.warn(msg) else: quoted_topology_dimension = mesh_var.topology_dimension if quoted_topology_dimension != topology_dimension: @@ -382,8 +381,7 @@ def _build_mesh(cf, mesh_var, file_path): f"{quoted_topology_dimension}" " -- ignoring this as it is inconsistent." ) - # TODO: reconsider logging level when we have consistent practice. 
- logger.warning(msg=msg, extra=dict(cls=None)) + warnings.warn(msg) node_dimension = None edge_dimension = getattr(mesh_var, "edge_dimension", None) diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index af97458ded..b0b60ee506 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -16,8 +16,9 @@ from collections.abc import Iterable +import pytest + from iris import Constraint, load -from iris.experimental.ugrid import logger from iris.experimental.ugrid.load import ( PARSE_UGRID_ON_LOAD, load_mesh, @@ -168,8 +169,8 @@ def create_synthetic_file(self, **create_kwargs): def test_mesh_bad_topology_dimension(self): # Check that the load generates a suitable warning. - log_regex = r"topology_dimension.* ignoring" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): + warn_regex = r"topology_dimension.* ignoring" + with pytest.warns(UserWarning, match=warn_regex): template = "minimal_bad_topology_dim" dim_line = "mesh_var:topology_dimension = 1 ;" # which is wrong ! cube = self.create_synthetic_test_cube( @@ -181,8 +182,8 @@ def test_mesh_bad_topology_dimension(self): def test_mesh_no_topology_dimension(self): # Check that the load generates a suitable warning. - log_regex = r"Mesh variable.* has no 'topology_dimension'" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): + warn_regex = r"Mesh variable.* has no 'topology_dimension'" + with pytest.warns(UserWarning, match=warn_regex): template = "minimal_bad_topology_dim" dim_line = "" # don't create ANY topology_dimension property cube = self.create_synthetic_test_cube( @@ -194,8 +195,8 @@ def test_mesh_no_topology_dimension(self): def test_mesh_bad_cf_role(self): # Check that the load generates a suitable warning. 
- log_regex = r"inappropriate cf_role" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): + warn_regex = r"inappropriate cf_role" + with pytest.warns(UserWarning, match=warn_regex): template = "minimal_bad_mesh_cf_role" dim_line = 'mesh_var:cf_role = "foo" ;' _ = self.create_synthetic_test_cube( @@ -204,8 +205,8 @@ def test_mesh_bad_cf_role(self): def test_mesh_no_cf_role(self): # Check that the load generates a suitable warning. - log_regex = r"no cf_role attribute" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): + warn_regex = r"no cf_role attribute" + with pytest.warns(UserWarning, match=warn_regex): template = "minimal_bad_mesh_cf_role" dim_line = "" _ = self.create_synthetic_test_cube( diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index bdf1d5e03b..a4e0e05a08 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -14,12 +14,13 @@ # importing anything else. import iris.tests as tests # isort:skip +import re +import warnings + import numpy as np +import pytest -from iris.experimental.ugrid.cf import ( - CFUGridAuxiliaryCoordinateVariable, - logger, -) +from iris.experimental.ugrid.cf import CFUGridAuxiliaryCoordinateVariable from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) @@ -213,26 +214,30 @@ def test_warn(self): "ref_source": ref_source, } - # The warn kwarg and expected corresponding log level. - warn_and_level = {True: "WARNING", False: "DEBUG"} + def operation(warn: bool): + warnings.warn("emit at least 1 warning") + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) # Missing warning. 
- log_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) + warn_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" + with pytest.warns(UserWarning, match=warn_regex): + operation(warn=True) + with pytest.warns() as record: + operation(warn=False) + warn_list = [str(w.message) for w in record] + assert list(filter(re.compile(warn_regex).match, warn_list)) == [] # String variable warning. - log_regex = r".*is a CF-netCDF label variable.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) + warn_regex = r".*is a CF-netCDF label variable.*" + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + with pytest.warns(UserWarning, match=warn_regex): + operation(warn=True) + with pytest.warns() as record: + operation(warn=False) + warn_list = [str(w.message) for w in record] + assert list(filter(re.compile(warn_regex).match, warn_list)) == [] diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index 7d461b324a..27d5c1db90 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -14,9 +14,13 @@ # importing anything else. 
import iris.tests as tests # isort:skip +import re +import warnings + import numpy as np +import pytest -from iris.experimental.ugrid.cf import CFUGridConnectivityVariable, logger +from iris.experimental.ugrid.cf import CFUGridConnectivityVariable from iris.experimental.ugrid.mesh import Connectivity from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, @@ -199,26 +203,30 @@ def test_warn(self): "ref_source": ref_source, } - # The warn kwarg and expected corresponding log level. - warn_and_level = {True: "WARNING", False: "DEBUG"} + def operation(warn: bool): + warnings.warn("emit at least 1 warning") + result = CFUGridConnectivityVariable.identify(vars_all, warn=warn) + self.assertDictEqual({}, result) # Missing warning. - log_regex = rf"Missing CF-UGRID connectivity variable {subject_name}.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - result = CFUGridConnectivityVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) + warn_regex = ( + rf"Missing CF-UGRID connectivity variable {subject_name}.*" + ) + with pytest.warns(UserWarning, match=warn_regex): + operation(warn=True) + with pytest.warns() as record: + operation(warn=False) + warn_list = [str(w.message) for w in record] + assert list(filter(re.compile(warn_regex).match, warn_list)) == [] # String variable warning. 
- log_regex = r".*is a CF-netCDF label variable.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - result = CFUGridConnectivityVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) + warn_regex = r".*is a CF-netCDF label variable.*" + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + with pytest.warns(UserWarning, match=warn_regex): + operation(warn=True) + with pytest.warns() as record: + operation(warn=False) + warn_list = [str(w.message) for w in record] + assert list(filter(re.compile(warn_regex).match, warn_list)) == [] diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index 08915f7cff..6b278cf1b1 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -14,9 +14,13 @@ # importing anything else. import iris.tests as tests # isort:skip +import re +import warnings + import numpy as np +import pytest -from iris.experimental.ugrid.cf import CFUGridMeshVariable, logger +from iris.experimental.ugrid.cf import CFUGridMeshVariable from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) @@ -242,22 +246,28 @@ def test_warn(self): "ref_source": ref_source, } - # The warn kwarg and expected corresponding log level. - warn_and_level = {True: "WARNING", False: "DEBUG"} + def operation(warn: bool): + warnings.warn("emit at least 1 warning") + result = CFUGridMeshVariable.identify(vars_all, warn=warn) + self.assertDictEqual({}, result) # Missing warning. 
- log_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - result = CFUGridMeshVariable.identify(vars_all, warn=warn) - self.assertDictEqual({}, result) + warn_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" + with pytest.warns(UserWarning, match=warn_regex): + operation(warn=True) + with pytest.warns() as record: + operation(warn=False) + warn_list = [str(w.message) for w in record] + assert list(filter(re.compile(warn_regex).match, warn_list)) == [] # String variable warning. - log_regex = r".*is a CF-netCDF label variable.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - result = CFUGridMeshVariable.identify(vars_all, warn=warn) - self.assertDictEqual({}, result) + warn_regex = r".*is a CF-netCDF label variable.*" + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + with pytest.warns(UserWarning, match=warn_regex): + operation(warn=True) + with pytest.warns() as record: + operation(warn=False) + warn_list = [str(w.message) for w in record] + assert list(filter(re.compile(warn_regex).match, warn_list)) == [] From 064e6ee85417ab312dcbceaa2d334dd6bf9e1f25 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Thu, 14 Sep 2023 15:07:54 +0100 Subject: [PATCH 050/134] idiff acceptance fix (#5482) * removed duplication case in Accept * updated docstring * corrected whatsnew (hopefully) * the merge undid the fix. Here we go * removed trailng . 
from whatsnew * changed ref to mod --- docs/src/whatsnew/latest.rst | 3 +++ lib/iris/tests/graphics/idiff.py | 17 ++++++----------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 8391f2ced6..35b6121c41 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -38,6 +38,9 @@ This document explains the changes made to Iris for this release #. `@scottrobinson02`_ fixed the output units when dividing a coordinate by a cube. (:issue:`5305`, :pull:`5331`) + +#. `@ESadek-MO`_ has updated :mod:`iris.tests.graphics.idiff` to stop duplicated file names + preventing acceptance. (:issue:`5098`, :pull:`5482`) 💣 Incompatible Changes diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index a355f2cf82..62e72f4e0e 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -73,17 +73,12 @@ def diff_viewer( repo = graphics.read_repo_json() def accept(event): - if test_id not in repo: - repo[test_id] = phash - graphics.write_repo_json(repo) - out_file = result_dir / (test_id + ".png") - result_path.rename(out_file) - msg = f"ACCEPTED: {result_path.name} -> {out_file.name}" - print(msg) - else: - msg = f"DUPLICATE: {result_path.name} -> {expected_path.name} (ignored)" - print(msg) - result_path.unlink() + repo[test_id] = phash + graphics.write_repo_json(repo) + out_file = result_dir / (test_id + ".png") + result_path.rename(out_file) + msg = f"ACCEPTED: {result_path.name} -> {out_file.name}" + print(msg) diff_fname.unlink() plt.close() From 10cec6911db04782b9b532601b2929c60ab3125c Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Tue, 19 Sep 2023 13:28:36 +0100 Subject: [PATCH 051/134] Environment lockfiles auto-update, including `asv_runner`. (#5503) * Updated environment lockfiles * Include asv_runner - to support ASV>=0.6 . 
--------- Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 131 +++++++++++++------------ requirements/locks/py311-linux-64.lock | 131 +++++++++++++------------ requirements/locks/py39-linux-64.lock | 129 ++++++++++++------------ requirements/py310.yml | 1 + requirements/py311.yml | 1 + requirements/py39.yml | 1 + 6 files changed, 200 insertions(+), 194 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index ed387a25a8..efa5431823 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 90bea26e2629b01270a880c650dfec7b34c38d9b6c6ddb4f8c9fee205d0e1ad6 +# input_hash: bc0383c4702650016b286d9f1d4b405e53a5c85772e04622e3c10fd51465463a @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,17 +9,15 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_1.conda#acfb4817400db5804030a3a7ef7909a1 
https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_1.conda#8bb001683321dcbde117a7337b5aace7 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.9-hd590300_0.conda#a0c6f0e7e1a467f5678f94dea18c8aa7 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_1.conda#ff8999574b465089ba0aa25a5e865bd0 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 @@ -28,7 +26,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fd https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 
https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 +https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f @@ -38,12 +36,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_1.conda#a0d27fd5c6f05aa45e9602b1db49581c https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 
https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b @@ -70,13 +68,13 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_1.conda#394218a92951499aed2ab1bafb30b570 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -84,31 +82,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.cond https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_0.conda#d597567092897b1f1c7350f32e03944e -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_1.conda#85552d64cb49f12781668779efc738ec 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 
https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 @@ -116,6 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.con https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 +https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae 
https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_0.conda#fb6201eb1daa3a3a2f91a4833bdf27c7 @@ -132,11 +130,11 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#1 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.3-pyhd8ed1ab_0.conda#3104cf0ab9fb9de393051bf92b10dbe9 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.1-pyh1a96a4e_0.conda#d69753ff6ee3c84a6638921dd95db662 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -144,9 +142,10 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda 
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_0.conda#741385a84f6a1b6623eb39226cc669e8 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda#96aec6156d58591f5a4e67056521ce1b +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 @@ -154,7 +153,6 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_0.conda#5597d9f9778af6883ae64f0e7d39416c https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py310ha4c1d20_0.conda#188e72aa313da668464e35309e9a32b0 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea 
https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d @@ -162,25 +160,25 @@ https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd715 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py310h2372a71_0.conda#75e60ce53c01a121039b3050c9e1f759 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_0.conda#511120451bf728d52bb37c73d4069e57 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_0.conda#dfb49d3ac440e1a236080f9c300e642f -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 @@ 
-192,41 +190,36 @@ https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py310hd41b1e2_0.conda#684399f9ddc0b9d6f3b6164f6107098e -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.0-py310h2372a71_0.conda#c0967a89cb3c2b8f493b5990dcd7c26d +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py310h2372a71_0.conda#0834a720fe60f511913ac52cd01e40dc https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py310h2372a71_0.conda#1f18231ffab82f236ce074b2aaa07e54 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h74d50f4_7.conda#3453ac94a99ad9daf17e8a313d274567 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py310h582fbeb_0.conda#adcc7ea52e4d39d0a93f6a2ef36c7fd4 https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h7dcad9a_2.conda#a46061c83ed37bfa05d1ee96ec2fbb08 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_0.conda#be1a7e420b7bac4ee02353d0e3161918 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_0.conda#2f45c1da3828ec2dc44d84b68916e3e7 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_1.conda#483fe58e14ba244110cd1be2b771b70f +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py310hf38f957_0.conda#9b55c9041c5a7f80f184a2cb05ec9663 
-https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py310hcc13569_0.conda#6c92da4ec4e301d09a365c0584e632c8 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310ha4c1d20_0.conda#1ac91334ffc1f3fd297319cd1c74b34e https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 @@ -236,31 +229,38 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.0-pyhd8ed1ab_0.conda#7bcadb3fcbb9fadeb53abef5b53b1f98 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-hf7dbed1_0.conda#ad8e8068208846032d6e9ce73d406cee -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.27-pyhd8ed1ab_0.conda#6fbde8d3bdd1874132a1b26a3554b22c -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py310hd41b1e2_0.conda#e00d52a8657a79b0a7c8c10559784759 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca 
+https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-h8e1006c_1.conda#98206c865fccdea9723f0c6f9241a24f +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_102.conda#6025039727a049ab4c0f2aab842c01cb -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb -https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py310hcc13569_0.conda#6c92da4ec4e301d09a365c0584e632c8 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310hb13e2d6_1.conda#4f522fc9cb8ecfa25e39f5c2ea65b16b +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h7dcad9a_2.conda#a46061c83ed37bfa05d1ee96ec2fbb08 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.0-pyhd8ed1ab_0.conda#e72cf2f28bd6288a790c13592648f265 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 
+https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.2-pyhd8ed1ab_0.conda#ddb4fd6105b4005b312625cef210ba67 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_0.conda#0ba9c5af7a6cd0244a8ae2038c89317f +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_102.conda#6025039727a049ab4c0f2aab842c01cb +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h7fe3ca9_15.conda#f09d307dd78e61e4eb2c6c2f81056d0e -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310ha4c1d20_0.conda#d1ec73b85cb90900c40c7fbcd36059e8 +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a 
+https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_4.conda#db878a0696f9a7980171fd3cf29cca22 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py310hff52083_0.conda#7e454b4a61754714a4a4d183641374da -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_0.conda#6c194758494847c927ad3bcf37fafa49 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.0-pyhd8ed1ab_0.conda#16cff214435f2a8163fbe67db9eafb96 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 @@ -270,3 +270,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 + diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 7542d2bff0..ff6f4ff76c 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: b73fe0fbcf5caf5854030c02a6233bae6e4061e9f4175a5d8810c6bb3d7701b2 +# input_hash: e651bbc39258b157d888d85ff878fc5c3d62cc9a632c7b8bf515b62cd8d2da53 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,17 +9,15 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_1.conda#acfb4817400db5804030a3a7ef7909a1 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_1.conda#8bb001683321dcbde117a7337b5aace7 
https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.9-hd590300_0.conda#a0c6f0e7e1a467f5678f94dea18c8aa7 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_1.conda#ff8999574b465089ba0aa25a5e865bd0 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 @@ -28,7 +26,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fd https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 +https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 
https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f @@ -38,12 +36,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_1.conda#a0d27fd5c6f05aa45e9602b1db49581c https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b @@ -70,13 +68,13 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 
https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_1.conda#394218a92951499aed2ab1bafb30b570 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -84,31 +82,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.cond https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_0.conda#d597567092897b1f1c7350f32e03944e -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_1.conda#85552d64cb49f12781668779efc738ec https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 
-https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.5-hab00c5b_0_cpython.conda#f0288cb82594b1cbc71111d1cd3c5422 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe 
-https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 @@ -116,6 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.con https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea +https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_0.conda#b8128d083dbf6abd472b1a3e98b0b83d @@ -132,11 +130,11 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#1 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.3-pyhd8ed1ab_0.conda#3104cf0ab9fb9de393051bf92b10dbe9 
+https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.1-pyh1a96a4e_0.conda#d69753ff6ee3c84a6638921dd95db662 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -144,9 +142,10 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_0.conda#f53903649188b99e6b44c560c69f5b23 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 
-https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda#96aec6156d58591f5a4e67056521ce1b +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 @@ -154,7 +153,6 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py311h64a7726_0.conda#71fd6f1734a0fa64d8f852ae7156ec45 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d @@ -162,25 +160,25 @@ https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd715 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b 
-https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py311h459d7ec_0.conda#87b306459b81b7a7aaad37222d537a4f -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_0.conda#30eaaf31141e785a445bf1ede6235fe3 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 
-https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_0.conda#7d9a31416c18704f55946ff7cf8da5dc -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 @@ -191,41 +189,36 @@ https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 
https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py311h9547e67_0.conda#daf3f23397ab2265d0cdfa339f3627ba -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.0-py311h459d7ec_0.conda#1b0db1a905b509db652609560ae9a2d5 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py311h459d7ec_0.conda#d23df37f3a595e8ffca99642ab6df3eb https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py311h459d7ec_0.conda#fc327c0ea015db3b6484eabb37d44e60 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h74d50f4_7.conda#3453ac94a99ad9daf17e8a313d274567 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 
https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py311h0b84326_0.conda#4b24acdc1fbbae9da03147e7d2cf8c8a https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311he06c224_2.conda#10a1953d2f74d292b5de093ceea104b2 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_0.conda#17d25ab64a32872b349579fdb07bbdb2 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_0.conda#2f45c1da3828ec2dc44d84b68916e3e7 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_1.conda#483fe58e14ba244110cd1be2b771b70f +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py311h54ef318_0.conda#2631a9e423855fb586c05f8a5ee8b177 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py311h320fe9a_0.conda#7f35501e126df510b250ad893482ef45 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py311h64a7726_0.conda#bf16a9f625126e378302f08e7ed67517 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c @@ -235,31 +228,38 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b 
https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.0-pyhd8ed1ab_0.conda#7bcadb3fcbb9fadeb53abef5b53b1f98 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-hf7dbed1_0.conda#ad8e8068208846032d6e9ce73d406cee -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.27-pyhd8ed1ab_0.conda#6fbde8d3bdd1874132a1b26a3554b22c -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py311h9547e67_0.conda#db5b3b0093d0d4565e5c89578108402e +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-h8e1006c_1.conda#98206c865fccdea9723f0c6f9241a24f +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_102.conda#b48083ba918347f30efa94f7dc694919 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb 
-https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py311h320fe9a_0.conda#7f35501e126df510b250ad893482ef45 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py311h64a7726_1.conda#58af16843fc4469770bdbaf45d3a19de +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311he06c224_2.conda#10a1953d2f74d292b5de093ceea104b2 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.0-pyhd8ed1ab_0.conda#e72cf2f28bd6288a790c13592648f265 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.2-pyhd8ed1ab_0.conda#ddb4fd6105b4005b312625cef210ba67 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_0.conda#b67672c2f39ef2912a1814e29e42c7ca 
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_102.conda#b48083ba918347f30efa94f7dc694919 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h7fe3ca9_15.conda#f09d307dd78e61e4eb2c6c2f81056d0e -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py311h64a7726_0.conda#18d094fb8e4ac52f93a4f4857a8f1e8f +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_4.conda#afe5363b88d2e97266063558a6599bd0 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py311h38be061_0.conda#c056ffab165096669389e5a4eea4dc4d -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py311h38be061_0.conda#8148b139a0560666d661cf1d179a0cca +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.0-pyhd8ed1ab_0.conda#16cff214435f2a8163fbe67db9eafb96 
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 @@ -269,3 +269,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 + diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 24a1b7cc90..fc574db4f3 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: a96712105b515671c42bd403fde393d6f10f99a02267d05c771ab9ca88f64093 +# input_hash: 8b81c2e9972c5059e1b9013a49eddbd4697c92807d6f5d5282350b6c6d0dc518 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,17 +9,15 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_1.conda#acfb4817400db5804030a3a7ef7909a1 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_1.conda#8bb001683321dcbde117a7337b5aace7 
https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.9-hd590300_0.conda#a0c6f0e7e1a467f5678f94dea18c8aa7 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_1.conda#ff8999574b465089ba0aa25a5e865bd0 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 @@ -28,7 +26,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fd https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 +https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 
https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f @@ -38,12 +36,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_1.conda#a0d27fd5c6f05aa45e9602b1db49581c https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b @@ -70,13 +68,13 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 
https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_1.conda#394218a92951499aed2ab1bafb30b570 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -84,31 +82,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.cond https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 
https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_0.conda#d597567092897b1f1c7350f32e03944e -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_1.conda#85552d64cb49f12781668779efc738ec https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 
-https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe 
-https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 @@ -116,6 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.con https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb +https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_0.conda#8a1b6b1f5e230aaf6408d6b0aef3492f @@ -132,11 +130,11 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#1 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.3-pyhd8ed1ab_0.conda#3104cf0ab9fb9de393051bf92b10dbe9 
+https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.1-pyh1a96a4e_0.conda#d69753ff6ee3c84a6638921dd95db662 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -144,9 +142,10 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_0.conda#3822b0ae733e022c10469c0e46bdddc4 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 
-https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda#96aec6156d58591f5a4e67056521ce1b +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 @@ -154,7 +153,6 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_0.conda#9c858d105816f454c6b64f3e19184b60 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py39h6183b62_0.conda#f1c358d06344bd7f9a293f9af4b9b8fc https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d @@ -162,25 +160,25 @@ https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd715 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b 
-https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py39hd1e30aa_0.conda#41841cc1d7387bb7a30cdde4d88afbf4 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_0.conda#ccecb3196b3678e9b5fc8441d681c203 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda#4fe12573bf499ff85a0a364e00cc5c53 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 
-https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_0.conda#ee7f18d58a96b04fdbd2e55f7694ae0d -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 @@ -192,41 +190,37 @@ https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 
https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py39h7633fee_0.conda#54e6f32e448fdc273606011f0940d076 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_0.conda#434246edfc30e20c0847d4c2caff0a53 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py39hd1e30aa_0.conda#de06dc7edaddbd3b60c050f3a95d6fe6 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.0.1-pyhd8ed1ab_0.conda#d978c61aa5fc2c69380d53ad56b5ae86 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h74d50f4_7.conda#3453ac94a99ad9daf17e8a313d274567 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b 
-https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py39haaeba84_0.conda#f97a95fab7c69678ebf6b57396b1323e https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_2.conda#d79ed0ee1738151284ebd97092a6a210 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_0.conda#4eaef850715aff114e2126a2f1a7b1f0 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 
https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_0.conda#2f45c1da3828ec2dc44d84b68916e3e7 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_1.conda#483fe58e14ba244110cd1be2b771b70f +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.0.1-pyhd8ed1ab_0.conda#54661981fd331e20847d8a49543dd9af https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py39hddac248_0.conda#0a3624f600f51df010a274176e356ac5 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h6183b62_0.conda#a50279322335a176d74ed167f9ce468b https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb @@ -235,32 +229,38 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.0-pyhd8ed1ab_0.conda#7bcadb3fcbb9fadeb53abef5b53b1f98 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-hf7dbed1_0.conda#ad8e8068208846032d6e9ce73d406cee -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.27-pyhd8ed1ab_0.conda#6fbde8d3bdd1874132a1b26a3554b22c -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py39h0126182_0.conda#61cee808ff7830fcceeb4f336cc738b1 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py39h7633fee_0.conda#b673f03c191683996e66c881f90aff2b +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-h8e1006c_1.conda#98206c865fccdea9723f0c6f9241a24f +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_102.conda#05390bd5ad0ddc2f719392d087673344 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb -https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py39hddac248_0.conda#0a3624f600f51df010a274176e356ac5 
+https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h474f0d3_1.conda#f62409d868e23c1f97ae2b0db5658385 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_2.conda#d79ed0ee1738151284ebd97092a6a210 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.0-pyhd8ed1ab_0.conda#e72cf2f28bd6288a790c13592648f265 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.2-pyhd8ed1ab_0.conda#ddb4fd6105b4005b312625cef210ba67 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_0.conda#a529a20267af9f085c7f991cae79fef2 
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_102.conda#05390bd5ad0ddc2f719392d087673344 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h7fe3ca9_15.conda#f09d307dd78e61e4eb2c6c2f81056d0e -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h6183b62_0.conda#c7074f28bd86170a8235ddc995b4ee57 +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_4.conda#e12391692d70732bf1df08b7ecf40095 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py39hf3d152e_0.conda#6ce223b8b14df8bdfa72ac2a10c2fad3 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py39hf3d152e_0.conda#e348333b50ff1f978f3d6af24512de0b +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.0-pyhd8ed1ab_0.conda#16cff214435f2a8163fbe67db9eafb96 
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 @@ -270,3 +270,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 + diff --git a/requirements/py310.yml b/requirements/py310.yml index 2ba8abb7ae..fd549a9cf7 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - asv_runner - distributed - filelock - imagehash >=4.0 diff --git a/requirements/py311.yml b/requirements/py311.yml index 80e112d850..a883e5d87a 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - asv_runner - distributed - filelock - imagehash >=4.0 diff --git a/requirements/py39.yml b/requirements/py39.yml index ed6a5eda54..5b3c17510e 100644 --- a/requirements/py39.yml +++ b/requirements/py39.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. 
+ - asv_runner - distributed - filelock - imagehash >=4.0 From b6e39d758fc17e167b9323b25484965453846dbf Mon Sep 17 00:00:00 2001 From: Alex Chamberlain-Clay <68277260+acchamber@users.noreply.github.com> Date: Thu, 21 Sep 2023 16:13:07 +0100 Subject: [PATCH 052/134] Updated all np.product calls to np.prod (#5493) * Updated all np.product calls to np.prod * added whats new * review responses * no typos here * added link to name * Update docs/src/whatsnew/latest.rst Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> * Update docs/src/whatsnew/latest.rst Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --------- Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 6 ++++++ lib/iris/fileformats/_structured_array_identification.py | 2 +- lib/iris/pandas.py | 2 +- lib/iris/tests/integration/netcdf/test_thread_safety.py | 2 +- lib/iris/tests/unit/pandas/test_pandas.py | 6 +++--- 5 files changed, 12 insertions(+), 6 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 35b6121c41..a9b470296f 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -82,6 +82,9 @@ This document explains the changes made to Iris for this release working properly. (Main pull request: :pull:`5437`, more detail: :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) +#. `@acchamber`_ removed several warnings from iris related to Numpy 1.25 deprecations. + (:pull:`5493`) + #. `@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in favour of using Python warnings, following team agreement. (:pull:`5488`) @@ -90,11 +93,14 @@ This document explains the changes made to Iris for this release + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: .. _@scottrobinson02: https://github.com/scottrobinson02 +.. 
_@acchamber: https://github.com/acchamber + .. comment Whatsnew resources in alphabetical order: diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index b313500de7..11c62983e3 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -417,7 +417,7 @@ def filter_strides_of_length(length): # If we are to build another dimension on top of this possible # structure, we need to compute the stride that would be # needed for that dimension. - next_stride = np.product( + next_stride = np.prod( [struct.size for (_, struct) in potential] ) diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 4d6681e94e..4c06530627 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -398,7 +398,7 @@ def as_cubes( cube_shape = getattr(pandas_index, "levshape", (pandas_index.nunique(),)) n_rows = len(pandas_structure) - if np.product(cube_shape) > n_rows: + if np.prod(cube_shape) > n_rows: message = ( f"Not all index values have a corresponding row - {n_rows} rows " f"cannot be reshaped into {cube_shape}. 
Consider padding with NaN " diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py index 5ed32d0671..c5779250a2 100644 --- a/lib/iris/tests/integration/netcdf/test_thread_safety.py +++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py @@ -38,7 +38,7 @@ def tiny_chunks(): def _check_tiny_loaded_chunks(cube: Cube): assert cube.has_lazy_data() cube_lazy_data = cube.core_data() - assert np.product(cube_lazy_data.chunksize) < cube_lazy_data.size + assert np.prod(cube_lazy_data.chunksize) < cube_lazy_data.size with dask.config.set({"array.chunk-size": "1KiB"}): yield _check_tiny_loaded_chunks diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index fd716bd7c9..d74d7cad9c 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -1075,7 +1075,7 @@ def test_ancillary_variable(self): def test_3d_with_2d_coord(self): df = self._create_pandas(index_levels=3) coord_shape = df.index.levshape[:2] - coord_values = np.arange(np.product(coord_shape)) + coord_values = np.arange(np.prod(coord_shape)) coord_name = "foo" df[coord_name] = coord_values.repeat(df.index.levshape[-1]) result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name]) @@ -1089,7 +1089,7 @@ def test_3d_with_2d_coord(self): def test_coord_varies_all_indices(self): df = self._create_pandas(index_levels=3) coord_shape = df.index.levshape - coord_values = np.arange(np.product(coord_shape)) + coord_values = np.arange(np.prod(coord_shape)) coord_name = "foo" df[coord_name] = coord_values result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name]) @@ -1105,7 +1105,7 @@ def test_category_coord(self): # increment. 
df = self._create_pandas(index_levels=2) coord_shape = df.index.levshape - coord_values = np.arange(np.product(coord_shape)) + coord_values = np.arange(np.prod(coord_shape)) coord_name = "foo" # Create a repeating value along a dimension. From 4f126a0bcb52851a6f28667da65d43d0af89a252 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Fri, 22 Sep 2023 11:30:09 +0100 Subject: [PATCH 053/134] Categorise warnings (#5498) * Introduce IrisUserWarning. * Align existing warning subcategories. * Plant a flag for future DeprecationWarning use. * Introduce test_categorised_warnings(). * Fix backwards compatibility of existing warnings classes. * Categorise all Iris warnings. * Fix test_categorised_warnings() by using ast module. * Add missing warning category kwargs. * Warnings combo experiment. * Warnings combo finalise. * Fix stray comma in coords.py. * Fix failing tests. * Categorise warning tests. * What's New entry. --- docs/src/whatsnew/latest.rst | 8 +- lib/iris/__init__.py | 2 + lib/iris/_concatenate.py | 3 +- lib/iris/_deprecation.py | 10 +- lib/iris/analysis/_regrid.py | 3 +- lib/iris/analysis/calculus.py | 6 +- lib/iris/analysis/cartography.py | 23 +- lib/iris/analysis/geometry.py | 8 +- lib/iris/analysis/maths.py | 3 +- lib/iris/aux_factory.py | 68 ++++-- lib/iris/config.py | 12 +- lib/iris/coord_systems.py | 7 +- lib/iris/coords.py | 18 +- lib/iris/cube.py | 8 +- lib/iris/exceptions.py | 209 ++++++++++++++++++ lib/iris/experimental/regrid.py | 3 +- lib/iris/experimental/ugrid/cf.py | 27 ++- lib/iris/experimental/ugrid/load.py | 31 ++- lib/iris/fileformats/_ff.py | 35 ++- .../fileformats/_nc_load_rules/actions.py | 29 ++- .../fileformats/_nc_load_rules/helpers.py | 130 +++++++++-- lib/iris/fileformats/cf.py | 50 ++++- lib/iris/fileformats/name_loaders.py | 10 +- lib/iris/fileformats/netcdf/loader.py | 22 +- lib/iris/fileformats/netcdf/saver.py | 41 +++- lib/iris/fileformats/nimrod_load_rules.py | 26 ++- 
lib/iris/fileformats/pp.py | 47 +++- lib/iris/fileformats/pp_save_rules.py | 3 +- lib/iris/fileformats/rules.py | 10 +- lib/iris/iterate.py | 5 +- lib/iris/pandas.py | 5 +- lib/iris/plot.py | 6 +- lib/iris/tests/graphics/idiff.py | 3 +- .../experimental/test_ugrid_load.py | 9 +- .../integration/netcdf/test_delayed_save.py | 8 +- .../tests/integration/netcdf/test_general.py | 6 +- .../netcdf/test_self_referencing.py | 9 +- lib/iris/tests/integration/test_pp.py | 6 +- lib/iris/tests/test_coding_standards.py | 61 +++++ lib/iris/tests/test_concatenate.py | 13 +- lib/iris/tests/test_coordsystem.py | 3 +- lib/iris/tests/test_hybrid.py | 9 +- lib/iris/tests/test_iterate.py | 5 +- lib/iris/tests/test_netcdf.py | 5 +- .../unit/analysis/cartography/test_project.py | 4 +- .../geometry/test_geometry_area_weights.py | 5 +- lib/iris/tests/unit/coords/test_Coord.py | 14 +- lib/iris/tests/unit/cube/test_Cube.py | 12 +- ...test_CFUGridAuxiliaryCoordinateVariable.py | 14 +- .../cf/test_CFUGridConnectivityVariable.py | 14 +- .../ugrid/cf/test_CFUGridMeshVariable.py | 14 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 4 +- .../unit/fileformats/ff/test_FFHeader.py | 5 +- .../name_loaders/test__build_cell_methods.py | 5 +- .../nc_load_rules/actions/__init__.py | 3 +- .../helpers/test_parse_cell_methods.py | 5 +- .../netcdf/loader/test__load_aux_factory.py | 4 +- .../fileformats/netcdf/saver/test_Saver.py | 7 +- .../saver/test_Saver__lazy_stream_data.py | 3 +- .../netcdf/saver/test__fillvalue_report.py | 8 +- .../nimrod_load_rules/test_vertical_coord.py | 2 +- .../tests/unit/fileformats/pp/test_PPField.py | 7 +- 62 files changed, 930 insertions(+), 205 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a9b470296f..4c732c43df 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -30,7 +30,9 @@ This document explains the changes made to Iris for this release ✨ Features =========== -#. N/A +#. 
`@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' + :class:`UserWarning`\s for richer filtering. The full index of + sub-categories can be seen here: :mod:`iris.exceptions` . (:pull:`5498`) 🐛 Bugs Fixed @@ -38,7 +40,7 @@ This document explains the changes made to Iris for this release #. `@scottrobinson02`_ fixed the output units when dividing a coordinate by a cube. (:issue:`5305`, :pull:`5331`) - + #. `@ESadek-MO`_ has updated :mod:`iris.tests.graphics.idiff` to stop duplicated file names preventing acceptance. (:issue:`5098`, :pull:`5482`) @@ -87,7 +89,7 @@ This document explains the changes made to Iris for this release #. `@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in favour of using Python warnings, following team agreement. (:pull:`5488`) - + #. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no longer using the ``--strict`` argument. (:pull:`5496`) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 0e6670533f..2a3bd8a753 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -175,6 +175,8 @@ def __init__(self, datum_support=False, pandas_ndim=False): # self.__dict__['example_future_flag'] = example_future_flag self.__dict__["datum_support"] = datum_support self.__dict__["pandas_ndim"] = pandas_ndim + # TODO: next major release: set IrisDeprecation to subclass + # DeprecationWarning instead of UserWarning. 
def __repr__(self): # msg = ('Future(example_future_flag={})') diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index c6d58b1622..837afd73f3 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -16,6 +16,7 @@ import iris.coords import iris.cube +import iris.exceptions from iris.util import array_equal, guess_coord_axis # @@ -998,7 +999,7 @@ def register( raise iris.exceptions.ConcatenateError([msg]) elif not match: msg = f"Found cubes with overlap on concatenate axis {candidate_axis}, skipping concatenation for these cubes" - warnings.warn(msg) + warnings.warn(msg, category=iris.exceptions.IrisUserWarning) # Check for compatible AuxCoords. if match: diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index 73fcedcd82..8ad762a558 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -12,7 +12,13 @@ class IrisDeprecation(UserWarning): - """An Iris deprecation warning.""" + """ + An Iris deprecation warning. + + Note this subclasses UserWarning for backwards compatibility with Iris' + original deprection warnings. Should subclass DeprecationWarning at the + next major release. 
+ """ pass @@ -44,7 +50,7 @@ def warn_deprecated(msg, stacklevel=2): >>> """ - warnings.warn(msg, IrisDeprecation, stacklevel=stacklevel) + warnings.warn(msg, category=IrisDeprecation, stacklevel=stacklevel) # A Mixin for a wrapper class that copies the docstring of the wrapped class diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 4592a0ede7..65679cd968 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -20,6 +20,7 @@ snapshot_grid, ) from iris.analysis._scipy_interpolate import _RegularGridInterpolator +from iris.exceptions import IrisImpossibleUpdateWarning from iris.util import _meshgrid, guess_coord_axis @@ -1136,6 +1137,6 @@ def regrid_reference_surface( "Cannot update aux_factory {!r} because of dropped" " coordinates.".format(factory.name()) ) - warnings.warn(msg) + warnings.warn(msg, category=IrisImpossibleUpdateWarning) return result diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 75b7d86406..44b1adc580 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -24,6 +24,7 @@ import iris.analysis.maths import iris.coord_systems import iris.coords +from iris.exceptions import IrisUserWarning from iris.util import delta __all__ = ["cube_delta", "curl", "differentiate"] @@ -85,7 +86,10 @@ def _construct_midpoint_coord(coord, circular=None): "Construction coordinate midpoints for the '{}' coordinate, " "though it has the attribute 'circular'={}." 
) - warnings.warn(msg.format(circular, coord.circular, coord.name())) + warnings.warn( + msg.format(circular, coord.circular, coord.name()), + category=IrisUserWarning, + ) if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 0d17f0b38a..0fae5bc499 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -401,16 +401,25 @@ def area_weights(cube, normalize=False): cs = cube.coord_system("CoordSystem") if isinstance(cs, iris.coord_systems.GeogCS): if cs.inverse_flattening != 0.0: - warnings.warn("Assuming spherical earth from ellipsoid.") + warnings.warn( + "Assuming spherical earth from ellipsoid.", + category=iris.exceptions.IrisDefaultingWarning, + ) radius_of_earth = cs.semi_major_axis elif isinstance(cs, iris.coord_systems.RotatedGeogCS) and ( cs.ellipsoid is not None ): if cs.ellipsoid.inverse_flattening != 0.0: - warnings.warn("Assuming spherical earth from ellipsoid.") + warnings.warn( + "Assuming spherical earth from ellipsoid.", + category=iris.exceptions.IrisDefaultingWarning, + ) radius_of_earth = cs.ellipsoid.semi_major_axis else: - warnings.warn("Using DEFAULT_SPHERICAL_EARTH_RADIUS.") + warnings.warn( + "Using DEFAULT_SPHERICAL_EARTH_RADIUS.", + category=iris.exceptions.IrisDefaultingWarning, + ) radius_of_earth = DEFAULT_SPHERICAL_EARTH_RADIUS # Get the lon and lat coords and axes @@ -551,7 +560,7 @@ def cosine_latitude_weights(cube): warnings.warn( "Out of range latitude values will be " "clipped to the valid range.", - UserWarning, + category=iris.exceptions.IrisDefaultingWarning, ) points = lat.points l_weights = np.cos(points).clip(0.0, 1.0) @@ -665,7 +674,8 @@ def project(cube, target_proj, nx=None, ny=None): # Assume WGS84 latlon if unspecified warnings.warn( "Coordinate system of latitude and longitude " - "coordinates is not specified. Assuming WGS84 Geodetic." + "coordinates is not specified. 
Assuming WGS84 Geodetic.", + category=iris.exceptions.IrisDefaultingWarning, ) orig_cs = iris.coord_systems.GeogCS( semi_major_axis=6378137.0, inverse_flattening=298.257223563 @@ -857,7 +867,8 @@ def project(cube, target_proj, nx=None, ny=None): lat_coord.name(), lon_coord.name(), [coord.name() for coord in discarded_coords], - ) + ), + category=iris.exceptions.IrisIgnoringWarning, ) # TODO handle derived coords/aux_factories diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index b246b518d4..9898f4e974 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -74,7 +74,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's x dimension at the " "lower end.", - UserWarning, + category=iris.exceptions.IrisGeometryExceedWarning, ) x_min_ix = 0 if x_ascending else x_coord.points.size - 1 @@ -84,7 +84,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's x dimension at the " "upper end.", - UserWarning, + category=iris.exceptions.IrisGeometryExceedWarning, ) x_max_ix = x_coord.points.size - 1 if x_ascending else 0 @@ -94,7 +94,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's y dimension at the " "lower end.", - UserWarning, + category=iris.exceptions.IrisGeometryExceedWarning, ) y_min_ix = 0 if y_ascending else y_coord.points.size - 1 @@ -104,7 +104,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's y dimension at the " "upper end.", - UserWarning, + category=iris.exceptions.IrisGeometryExceedWarning, ) y_max_ix = y_coord.points.size - 1 if y_ascending else 0 diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index b77c6cd80f..5e180c6ee2 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -988,7 +988,8 @@ 
def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): if other.has_bounds(): warnings.warn( "Using {!r} with a bounded coordinate is not well " - "defined; ignoring bounds.".format(operation_name) + "defined; ignoring bounds.".format(operation_name), + category=iris.exceptions.IrisIgnoringBoundsWarning, ) points = other.points diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index f49de62b3f..323c89e3fb 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -21,6 +21,7 @@ metadata_manager_factory, ) import iris.coords +from iris.exceptions import IrisIgnoringBoundsWarning class AuxCoordFactory(CFVariableMixin, metaclass=ABCMeta): @@ -441,7 +442,9 @@ def _check_dependencies(pressure_at_top, sigma, surface_air_pressure): f"Coordinate '{coord.name()}' has bounds. These will " "be disregarded" ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Check units if sigma.units.is_unknown(): @@ -522,7 +525,8 @@ def make_coord(self, coord_dims_func): if pressure_at_top.shape[-1:] not in [(), (1,)]: warnings.warn( "Pressure at top coordinate has bounds. These are being " - "disregarded" + "disregarded", + category=IrisIgnoringBoundsWarning, ) pressure_at_top_pts = nd_points_by_key["pressure_at_top"] bds_shape = list(pressure_at_top_pts.shape) + [1] @@ -530,7 +534,8 @@ def make_coord(self, coord_dims_func): if surface_air_pressure.shape[-1:] not in [(), (1,)]: warnings.warn( "Surface pressure coordinate has bounds. These are being " - "disregarded" + "disregarded", + category=IrisIgnoringBoundsWarning, ) surface_air_pressure_pts = nd_points_by_key[ "surface_air_pressure" @@ -595,7 +600,9 @@ def __init__(self, delta=None, sigma=None, orography=None): "Orography coordinate {!r} has bounds." 
" These will be disregarded.".format(orography.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) self.delta = delta self.sigma = sigma @@ -684,7 +691,7 @@ def make_coord(self, coord_dims_func): warnings.warn( "Orography coordinate has bounds. " "These are being disregarded.", - UserWarning, + category=IrisIgnoringBoundsWarning, stacklevel=2, ) orography_pts = nd_points_by_key["orography"] @@ -739,7 +746,9 @@ def update(self, old_coord, new_coord=None): "Orography coordinate {!r} has bounds." " These will be disregarded.".format(new_coord.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) self.orography = new_coord @@ -806,7 +815,9 @@ def _check_dependencies(delta, sigma, surface_air_pressure): "Surface pressure coordinate {!r} has bounds. These will" " be disregarded.".format(surface_air_pressure.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Check units. if sigma is not None and sigma.units.is_unknown(): @@ -898,7 +909,8 @@ def make_coord(self, coord_dims_func): if surface_air_pressure.shape[-1:] not in [(), (1,)]: warnings.warn( "Surface pressure coordinate has bounds. " - "These are being disregarded." + "These are being disregarded.", + category=IrisIgnoringBoundsWarning, ) surface_air_pressure_pts = nd_points_by_key[ "surface_air_pressure" @@ -1012,7 +1024,9 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): "The {} coordinate {!r} has bounds. 
" "These are being disregarded.".format(term, coord.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) for coord, term in ((depth_c, "depth_c"), (nsigma, "nsigma")): if coord is not None and coord.shape != (1,): @@ -1187,7 +1201,9 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1268,7 +1284,9 @@ def _check_dependencies(sigma, eta, depth): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Check units. if sigma is not None and sigma.units.is_unknown(): @@ -1349,7 +1367,9 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1444,7 +1464,9 @@ def _check_dependencies(s, c, eta, depth, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) if depth_c is not None and depth_c.shape != (1,): msg = ( @@ -1543,7 +1565,9 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. 
" "These are being disregarded.".format(key, name) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1637,7 +1661,9 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) coords = ((a, "a"), (b, "b"), (depth_c, "depth_c")) for coord, term in coords: @@ -1740,7 +1766,9 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1839,7 +1867,9 @@ def _check_dependencies(s, c, eta, depth, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) if depth_c is not None and depth_c.shape != (1,): msg = ( @@ -1938,7 +1968,9 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=IrisIgnoringBoundsWarning, stacklevel=2 + ) # Swap bounds with points. 
bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) diff --git a/lib/iris/config.py b/lib/iris/config.py index 79d141e53f..03d3d363a6 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -36,6 +36,8 @@ import os.path import warnings +import iris.exceptions + def get_logger( name, datefmt=None, fmt=None, level=None, propagate=None, handler=True @@ -145,7 +147,10 @@ def get_dir_option(section, option, default=None): "Ignoring config item {!r}:{!r} (section:option) as {!r}" " is not a valid directory path." ) - warnings.warn(msg.format(section, option, c_path)) + warnings.warn( + msg.format(section, option, c_path), + category=iris.exceptions.IrisIgnoringWarning, + ) return path @@ -251,7 +256,10 @@ def __setattr__(self, name, value): "Attempting to set invalid value {!r} for " "attribute {!r}. Defaulting to {!r}." ) - warnings.warn(wmsg.format(value, name, good_value)) + warnings.warn( + wmsg.format(value, name, good_value), + category=iris.exceptions.IrisDefaultingWarning, + ) value = good_value self.__dict__[name] = value diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index edf0c1871b..e2003d1286 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -15,6 +15,8 @@ import cartopy.crs as ccrs import numpy as np +import iris.exceptions + def _arg_default(value, default, cast_as=float): """Apply a default value and type for an optional kwarg.""" @@ -449,7 +451,7 @@ def inverse_flattening(self, value): "the GeogCS object. To change other properties set them explicitly" " or create a new GeogCS instance." ) - warnings.warn(wmsg, UserWarning) + warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) value = float(value) self._inverse_flattening = value @@ -818,7 +820,8 @@ def as_cartopy_crs(self): warnings.warn( "Discarding false_easting and false_northing that are " - "not used by Cartopy." 
+ "not used by Cartopy.", + category=iris.exceptions.IrisDefaultingWarning, ) return ccrs.Orthographic( diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 1a6e8d4e6a..3ff9bc8e5e 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2057,7 +2057,8 @@ def contiguous_bounds(self): if self.ndim == 1: warnings.warn( "Coordinate {!r} is not bounded, guessing " - "contiguous bounds.".format(self.name()) + "contiguous bounds.".format(self.name()), + category=iris.exceptions.IrisGuessBoundsWarning, ) bounds = self._guess_bounds() elif self.ndim == 2: @@ -2224,7 +2225,10 @@ def serialize(x): "Collapsing a multi-dimensional coordinate. " "Metadata may not be fully descriptive for {!r}." ) - warnings.warn(msg.format(self.name())) + warnings.warn( + msg.format(self.name()), + category=iris.exceptions.IrisVagueMetadataWarning, + ) else: try: self._sanity_check_bounds() @@ -2234,7 +2238,10 @@ def serialize(x): "Metadata may not be fully descriptive for {!r}. " "Ignoring bounds." ) - warnings.warn(msg.format(str(exc), self.name())) + warnings.warn( + msg.format(str(exc), self.name()), + category=iris.exceptions.IrisVagueMetadataWarning, + ) self.bounds = None else: if not self.is_contiguous(): @@ -2242,7 +2249,10 @@ def serialize(x): "Collapsing a non-contiguous coordinate. " "Metadata may not be fully descriptive for {!r}." 
) - warnings.warn(msg.format(self.name())) + warnings.warn( + msg.format(self.name()), + category=iris.exceptions.IrisVagueMetadataWarning, + ) if self.has_bounds(): item = self.core_bounds() diff --git a/lib/iris/cube.py b/lib/iris/cube.py index aec80dce47..60fdbc9c94 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3857,7 +3857,10 @@ def collapsed(self, coords, aggregator, **kwargs): ] if lat_match: for coord in lat_match: - warnings.warn(msg.format(coord.name())) + warnings.warn( + msg.format(coord.name()), + category=iris.exceptions.IrisUserWarning, + ) # Determine the dimensions we need to collapse (and those we don't) if aggregator.cell_method == "peak": @@ -4444,7 +4447,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): if coord_.has_bounds(): warnings.warn( "The bounds of coordinate %r were ignored in " - "the rolling window operation." % coord_.name() + "the rolling window operation." % coord_.name(), + category=iris.exceptions.IrisIgnoringBoundsWarning, ) if coord_.ndim != 1: diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 5d3da3349e..919917a01d 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -180,3 +180,212 @@ class CannotAddError(ValueError): """Raised when an object (e.g. coord) cannot be added to a :class:`~iris.cube.Cube`.""" pass + + +############################################################################### +# WARNINGS +# Please namespace all warning objects (i.e. prefix with Iris...). + + +class IrisUserWarning(UserWarning): + """ + Base class for :class:`UserWarning`\\ s generated by Iris. 
+ """ + + pass + + +class IrisLoadWarning(IrisUserWarning): + """Any warning relating to loading.""" + + pass + + +class IrisSaveWarning(IrisUserWarning): + """Any warning relating to saving.""" + + pass + + +class IrisCfWarning(IrisUserWarning): + """Any warning relating to :term:`CF Conventions` .""" + + pass + + +class IrisIgnoringWarning(IrisUserWarning): + """ + Any warning that involves an Iris operation not using some information. + + E.g. :class:`~iris.aux_factory.AuxCoordFactory` generation disregarding + bounds. + """ + + pass + + +class IrisDefaultingWarning(IrisUserWarning): + """ + Any warning that involves Iris changing invalid/missing information. + + E.g. creating a :class:`~iris.coords.AuxCoord` from an invalid + :class:`~iris.coords.DimCoord` definition. + """ + + pass + + +class IrisVagueMetadataWarning(IrisUserWarning): + """Warnings where object metadata may not be fully descriptive.""" + + pass + + +class IrisUnsupportedPlottingWarning(IrisUserWarning): + """Warnings where support for a plotting module/function is not guaranteed.""" + + pass + + +class IrisImpossibleUpdateWarning(IrisUserWarning): + """ + Warnings where it is not possible to update an object. + + Mainly generated during regridding where the necessary information for + updating an :class:`~iris.aux_factory.AuxCoordFactory` is no longer + present. 
+ """ + + pass + + +class IrisGeometryExceedWarning(IrisUserWarning): + """:mod:`iris.analysis.geometry` warnings about geometry exceeding dimensions.""" + + pass + + +class IrisMaskValueMatchWarning(IrisUserWarning): + """Warnings where the value representing masked data is actually present in data.""" + + pass + + +######## + + +class IrisCfLoadWarning(IrisCfWarning, IrisLoadWarning): + """Any warning relating to both loading and :term:`CF Conventions` .""" + + pass + + +class IrisCfSaveWarning(IrisCfWarning, IrisSaveWarning): + """Any warning relating to both saving and :term:`CF Conventions` .""" + + pass + + +class IrisCfInvalidCoordParamWarning(IrisCfLoadWarning): + """ + Warnings where incorrect information for CF coord construction is in a file. + """ + + pass + + +class IrisCfMissingVarWarning(IrisCfLoadWarning): + """ + Warnings where a CF variable references another variable that is not in the file. + """ + + pass + + +class IrisCfLabelVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): + """ + Warnings where a CF string/label variable is being used inappropriately. + """ + + pass + + +class IrisCfNonSpanningVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): + """ + Warnings where a CF variable is ignored because it does not span the required dimension. + """ + + pass + + +######## + + +class IrisIgnoringBoundsWarning(IrisIgnoringWarning): + """ + Warnings where bounds information has not been used by an Iris operation. + """ + + pass + + +class IrisCannotAddWarning(IrisIgnoringWarning): + """ + Warnings where a member object cannot be added to a :class:`~iris.cube.Cube` . + """ + + pass + + +class IrisGuessBoundsWarning(IrisDefaultingWarning): + """ + Warnings where Iris has filled absent bounds information with a best estimate. + """ + + pass + + +class IrisPpClimModifiedWarning(IrisSaveWarning, IrisDefaultingWarning): + """ + Warnings where a climatology has been modified while saving :term:`Post Processing (PP) Format` . 
+ """ + + pass + + +class IrisFactoryCoordNotFoundWarning(IrisLoadWarning): + """ + Warnings where a referenced factory coord can not be found when loading a variable in :term:`NetCDF Format`. + """ + + pass + + +class IrisNimrodTranslationWarning(IrisLoadWarning): + """ + For unsupported vertical coord types in :mod:`iris.file_formats.nimrod_load_rules`. + + (Pre-dates the full categorisation of Iris UserWarnings). + """ + + pass + + +class IrisUnknownCellMethodWarning(IrisCfLoadWarning): + """ + If a loaded :class:`~iris.coords.CellMethod` is not one the method names known to Iris. + + (Pre-dates the full categorisation of Iris UserWarnings). + """ + + pass + + +class IrisSaverFillValueWarning(IrisMaskValueMatchWarning, IrisSaveWarning): + """ + For fill value complications during Iris file saving :term:`NetCDF Format`. + + (Pre-dates the full categorisation of Iris UserWarnings). + """ + + pass diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 76c6002d2b..d5fa7c6f72 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -43,6 +43,7 @@ import iris.analysis.cartography import iris.coord_systems import iris.cube +from iris.exceptions import IrisImpossibleUpdateWarning from iris.util import _meshgrid wmsg = ( @@ -538,7 +539,7 @@ def regrid_reference_surface( "Cannot update aux_factory {!r} because of dropped" " coordinates.".format(factory.name()) ) - warnings.warn(msg) + warnings.warn(msg, category=IrisImpossibleUpdateWarning) return result def __call__(self, src_cube): diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index 86b76c7a75..42c1cfd0a3 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -12,6 +12,7 @@ """ import warnings +from ...exceptions import IrisCfLabelVarWarning, IrisCfMissingVarWarning from ...fileformats import cf from .mesh import Connectivity @@ -65,7 +66,9 @@ def identify(cls, variables, ignore=None, 
target=None, warn=True): f"{nc_var_name}" ) if warn: - warnings.warn(message) + warnings.warn( + message, category=IrisCfMissingVarWarning + ) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -80,7 +83,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"CF-netCDF label variable." ) if warn: - warnings.warn(message) + warnings.warn( + message, category=IrisCfLabelVarWarning + ) return result @@ -136,7 +141,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"variable {nc_var_name}" ) if warn: - warnings.warn(message) + warnings.warn( + message, + category=IrisCfMissingVarWarning, + ) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -154,7 +162,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"CF-netCDF label variable." ) if warn: - warnings.warn(message) + warnings.warn( + message, + category=IrisCfLabelVarWarning, + ) return result @@ -211,7 +222,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"referenced by netCDF variable {nc_var_name}" ) if warn: - warnings.warn(message) + warnings.warn( + message, category=IrisCfMissingVarWarning + ) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -226,7 +239,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"variable." 
) if warn: - warnings.warn(message) + warnings.warn( + message, category=IrisCfLabelVarWarning + ) return result diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index d2670ac690..67d1491930 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -19,6 +19,11 @@ from ...config import get_logger from ...coords import AuxCoord +from ...exceptions import ( + IrisCfWarning, + IrisDefaultingWarning, + IrisIgnoringWarning, +) from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names from ...fileformats.netcdf import loader as nc_loader from ...io import decode_uri, expand_filespecs @@ -35,6 +40,20 @@ logger = get_logger(__name__, propagate=True, handler=False) +class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboCfDefaultingIgnoring( + _WarnComboCfDefaulting, IrisIgnoringWarning +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + class ParseUGridOnLoad(threading.local): def __init__(self): """ @@ -351,7 +370,10 @@ def _build_mesh(cf, mesh_var, file_path): ) if cf_role_message: cf_role_message += " Correcting to 'mesh_topology'." - warnings.warn(cf_role_message) + warnings.warn( + cf_role_message, + category=_WarnComboCfDefaulting, + ) if hasattr(mesh_var, "volume_node_connectivity"): topology_dimension = 3 @@ -369,7 +391,7 @@ def _build_mesh(cf, mesh_var, file_path): f" : *Assuming* topology_dimension={topology_dimension}" ", consistent with the attached connectivities." ) - warnings.warn(msg) + warnings.warn(msg, category=_WarnComboCfDefaulting) else: quoted_topology_dimension = mesh_var.topology_dimension if quoted_topology_dimension != topology_dimension: @@ -381,7 +403,10 @@ def _build_mesh(cf, mesh_var, file_path): f"{quoted_topology_dimension}" " -- ignoring this as it is inconsistent." 
) - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboCfDefaultingIgnoring, + ) node_dimension = None edge_dimension = getattr(mesh_var, "edge_dimension", None) diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 2545bc39ae..5121b47976 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -13,7 +13,11 @@ import numpy as np -from iris.exceptions import NotYetImplementedError +from iris.exceptions import ( + IrisDefaultingWarning, + IrisLoadWarning, + NotYetImplementedError, +) from iris.fileformats._ff_cross_references import STASH_TRANS from . import pp @@ -118,6 +122,12 @@ REAL_POLE_LON = 5 +class _WarnComboLoadingDefaulting(IrisDefaultingWarning, IrisLoadWarning): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + class Grid: """ An abstract class representing the default/file-level grid @@ -431,7 +441,8 @@ def grid(self): grid_class = NewDynamics warnings.warn( "Staggered grid type: {} not currently interpreted, assuming " - "standard C-grid".format(self.grid_staggering) + "standard C-grid".format(self.grid_staggering), + category=_WarnComboLoadingDefaulting, ) grid = grid_class( self.column_dependent_constants, @@ -554,7 +565,7 @@ def range_order(range1, range2, resolution): "may be incorrect, not having taken into account the " "boundary size." ) - warnings.warn(msg) + warnings.warn(msg, category=IrisLoadWarning) else: range2 = field_dim[0] - res_low range1 = field_dim[0] - halo_dim * res_low @@ -628,7 +639,8 @@ def _adjust_field_for_lbc(self, field): "The LBC has a bdy less than 0. No " "case has previously been seen of " "this, and the decompression may be " - "erroneous." + "erroneous.", + category=IrisLoadWarning, ) field.bzx -= field.bdx * boundary_packing.x_halo field.bzy -= field.bdy * boundary_packing.y_halo @@ -741,7 +753,8 @@ def _extract_field(self): "which has not been explicitly " "handled by the fieldsfile loader." 
" Assuming the data is on a P grid" - ".".format(stash, subgrid) + ".".format(stash, subgrid), + category=_WarnComboLoadingDefaulting, ) field.x, field.y = grid.vectors(subgrid) @@ -757,14 +770,18 @@ def _extract_field(self): "STASH to grid type mapping. Picking the P " "position as the cell type".format(stash) ) - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboLoadingDefaulting, + ) field.bzx, field.bdx = grid.regular_x(subgrid) field.bzy, field.bdy = grid.regular_y(subgrid) field.bplat = grid.pole_lat field.bplon = grid.pole_lon elif no_x or no_y: warnings.warn( - "Partially missing X or Y coordinate values." + "Partially missing X or Y coordinate values.", + category=IrisLoadWarning, ) # Check for LBC fields. @@ -810,7 +827,9 @@ def _extract_field(self): "Input field skipped as PPField creation failed :" " error = {!r}" ) - warnings.warn(msg.format(str(valerr))) + warnings.warn( + msg.format(str(valerr)), category=IrisLoadWarning + ) def __iter__(self): return pp._interpret_fields(self._extract_field()) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 09237d3f11..be84b65132 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -44,6 +44,7 @@ import warnings from iris.config import get_logger +import iris.exceptions import iris.fileformats.cf import iris.fileformats.pp as pp @@ -53,6 +54,24 @@ logger = get_logger(__name__, fmt="[%(funcName)s]") +class _WarnComboCfLoadIgnoring( + iris.exceptions.IrisCfLoadWarning, + iris.exceptions.IrisIgnoringWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboLoadIgnoring( + iris.exceptions.IrisLoadWarning, + iris.exceptions.IrisIgnoringWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + def _default_rulenamesfunc(func_name): # A simple default function to deduce the 
rules-name from an action-name. funcname_prefix = "action_" @@ -471,7 +490,10 @@ def action_formula_type(engine, formula_root_fact): succeed = False rule_name += f"(FAILED - unrecognised formula type = {formula_type!r})" msg = f"Ignored formula of unrecognised type: {formula_type!r}." - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboCfLoadIgnoring, + ) if succeed: # Check we don't already have one. existing_type = engine.requires.get("formula_type") @@ -486,7 +508,10 @@ def action_formula_type(engine, formula_root_fact): f"Formula of type ={formula_type!r} " f"overrides another of type ={existing_type!r}.)" ) - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboLoadIgnoring, + ) rule_name += f"_{formula_type}" # Set 'requires' info for iris.fileformats.netcdf._load_aux_factory. engine.requires["formula_type"] = formula_type diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index bbf9c660c5..19a9cd18ca 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -219,6 +219,42 @@ ] +class _WarnComboIgnoringLoad( + iris.exceptions.IrisIgnoringWarning, + iris.exceptions.IrisLoadWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboDefaultingLoad( + iris.exceptions.IrisDefaultingWarning, + iris.exceptions.IrisLoadWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboDefaultingCfLoad( + iris.exceptions.IrisCfLoadWarning, + iris.exceptions.IrisDefaultingWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboIgnoringCfLoad( + iris.exceptions.IrisIgnoringWarning, + iris.exceptions.IrisCfLoadWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + def _split_cell_methods(nc_cell_methods: str) -> 
List[re.Match]: """ Split a CF cell_methods attribute string into a list of zero or more cell @@ -256,7 +292,11 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: "Cell methods may be incorrectly parsed due to mismatched " "brackets" ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, + category=iris.exceptions.IrisCfLoadWarning, + stacklevel=2, + ) if bracket_depth > 0 and ind in name_start_inds: name_start_inds.remove(ind) @@ -275,14 +315,21 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: msg = ( f"Failed to fully parse cell method string: {nc_cell_methods}" ) - warnings.warn(msg, UserWarning, stacklevel=2) + warnings.warn( + msg, category=iris.exceptions.IrisCfLoadWarning, stacklevel=2 + ) continue nc_cell_methods_matches.append(nc_cell_method_match) return nc_cell_methods_matches -class UnknownCellMethodWarning(Warning): +class UnknownCellMethodWarning(iris.exceptions.IrisUnknownCellMethodWarning): + """ + Backwards compatible form of :class:`iris.exceptions.IrisUnknownCellMethodWarning`. + """ + + # TODO: remove at the next major release. 
pass @@ -320,7 +367,7 @@ def parse_cell_methods(nc_cell_methods): msg = "NetCDF variable contains unknown cell method {!r}" warnings.warn( msg.format("{}".format(method_words[0])), - UnknownCellMethodWarning, + category=UnknownCellMethodWarning, ) d[_CM_METHOD] = method name = d[_CM_NAME] @@ -389,7 +436,6 @@ def parse_cell_methods(nc_cell_methods): ################################################################################ def build_cube_metadata(engine): """Add the standard meta data to the cube.""" - cf_var = engine.cf_var cube = engine.cube @@ -436,7 +482,10 @@ def build_cube_metadata(engine): cube.attributes[str(attr_name)] = attr_value except ValueError as e: msg = "Skipping global attribute {!r}: {}" - warnings.warn(msg.format(attr_name, str(e))) + warnings.warn( + msg.format(attr_name, str(e)), + category=_WarnComboIgnoringLoad, + ) ################################################################################ @@ -479,7 +528,7 @@ def _get_ellipsoid(cf_grid_var): "applied. To apply the datum when loading, use the " "iris.FUTURE.datum_support flag." 
) - warnings.warn(wmsg, FutureWarning, stacklevel=14) + warnings.warn(wmsg, category=FutureWarning, stacklevel=14) datum = None if datum is not None: @@ -512,7 +561,10 @@ def build_rotated_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0 ) if north_pole_latitude is None or north_pole_longitude is None: - warnings.warn("Rotated pole position is not fully specified") + warnings.warn( + "Rotated pole position is not fully specified", + category=iris.exceptions.IrisCfLoadWarning, + ) north_pole_grid_lon = getattr( cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 @@ -859,7 +911,10 @@ def get_attr_units(cf_var, attributes): msg = "Ignoring netCDF variable {!r} invalid units {!r}".format( cf_var.cf_name, attr_units ) - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboIgnoringCfLoad, + ) attributes["invalid_units"] = attr_units attr_units = UNKNOWN_UNIT_STRING @@ -948,7 +1003,8 @@ def get_cf_bounds_var(cf_coord_var): if attr_bounds is not None and attr_climatology is not None: warnings.warn( "Ignoring climatology in favour of bounds attribute " - "on NetCDF variable {!r}.".format(cf_coord_var.cf_name) + "on NetCDF variable {!r}.".format(cf_coord_var.cf_name), + category=_WarnComboIgnoringCfLoad, ) return cf_bounds_var, climatological @@ -1007,7 +1063,10 @@ def build_dimension_coordinate( if ma.is_masked(points_data): points_data = ma.filled(points_data) msg = "Gracefully filling {!r} dimension coordinate masked points" - warnings.warn(msg.format(str(cf_coord_var.cf_name))) + warnings.warn( + msg.format(str(cf_coord_var.cf_name)), + category=_WarnComboDefaultingLoad, + ) # Get any coordinate bounds. 
cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) @@ -1017,7 +1076,10 @@ def build_dimension_coordinate( if ma.is_masked(bounds_data): bounds_data = ma.filled(bounds_data) msg = "Gracefully filling {!r} dimension coordinate masked bounds" - warnings.warn(msg.format(str(cf_coord_var.cf_name))) + warnings.warn( + msg.format(str(cf_coord_var.cf_name)), + category=_WarnComboDefaultingLoad, + ) # Handle transposed bounds where the vertex dimension is not # the last one. Test based on shape to support different # dimension names. @@ -1082,7 +1144,10 @@ def build_dimension_coordinate( "Failed to create {name!r} dimension coordinate: {error}\n" "Gracefully creating {name!r} auxiliary coordinate instead." ) - warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + warnings.warn( + msg.format(name=str(cf_coord_var.cf_name), error=e_msg), + category=_WarnComboDefaultingCfLoad, + ) coord = iris.coords.AuxCoord( points_data, standard_name=standard_name, @@ -1097,7 +1162,10 @@ def build_dimension_coordinate( try: cube.add_aux_coord(coord, data_dims) except iris.exceptions.CannotAddError as e_msg: - warnings.warn(coord_skipped_msg.format(error=e_msg)) + warnings.warn( + coord_skipped_msg.format(error=e_msg), + category=iris.exceptions.IrisCannotAddWarning, + ) coord_skipped = True else: # Add the dimension coordinate to the cube. @@ -1108,7 +1176,10 @@ def build_dimension_coordinate( # Scalar coords are placed in the aux_coords container. 
cube.add_aux_coord(coord, data_dims) except iris.exceptions.CannotAddError as e_msg: - warnings.warn(coord_skipped_msg.format(error=e_msg)) + warnings.warn( + coord_skipped_msg.format(error=e_msg), + category=iris.exceptions.IrisCannotAddWarning, + ) coord_skipped = True if not coord_skipped: @@ -1186,7 +1257,10 @@ def build_auxiliary_coordinate( cube.add_aux_coord(coord, data_dims) except iris.exceptions.CannotAddError as e_msg: msg = "{name!r} coordinate not added to Cube: {error}" - warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + warnings.warn( + msg.format(name=str(cf_coord_var.cf_name), error=e_msg), + category=iris.exceptions.IrisCannotAddWarning, + ) else: # Make a list with names, stored on the engine, so we can find them all later. engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) @@ -1237,7 +1311,10 @@ def build_cell_measures(engine, cf_cm_var): cube.add_cell_measure(cell_measure, data_dims) except iris.exceptions.CannotAddError as e_msg: msg = "{name!r} cell measure not added to Cube: {error}" - warnings.warn(msg.format(name=str(cf_cm_var.cf_name), error=e_msg)) + warnings.warn( + msg.format(name=str(cf_cm_var.cf_name), error=e_msg), + category=iris.exceptions.IrisCannotAddWarning, + ) else: # Make a list with names, stored on the engine, so we can find them all later. engine.cube_parts["cell_measures"].append( @@ -1286,7 +1363,10 @@ def build_ancil_var(engine, cf_av_var): cube.add_ancillary_variable(av, data_dims) except iris.exceptions.CannotAddError as e_msg: msg = "{name!r} ancillary variable not added to Cube: {error}" - warnings.warn(msg.format(name=str(cf_av_var.cf_name), error=e_msg)) + warnings.warn( + msg.format(name=str(cf_av_var.cf_name), error=e_msg), + category=iris.exceptions.IrisCannotAddWarning, + ) else: # Make a list with names, stored on the engine, so we can find them all later. 
engine.cube_parts["ancillary_variables"].append( @@ -1503,7 +1583,8 @@ def has_supported_mercator_parameters(engine, cf_name): ): warnings.warn( "It does not make sense to provide both " - '"scale_factor_at_projection_origin" and "standard_parallel".' + '"scale_factor_at_projection_origin" and "standard_parallel".', + category=iris.exceptions.IrisCfInvalidCoordParamWarning, ) is_valid = False @@ -1533,7 +1614,10 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): latitude_of_projection_origin != 90 and latitude_of_projection_origin != -90 ): - warnings.warn('"latitude_of_projection_origin" must be +90 or -90.') + warnings.warn( + '"latitude_of_projection_origin" must be +90 or -90.', + category=iris.exceptions.IrisCfInvalidCoordParamWarning, + ) is_valid = False if ( @@ -1542,14 +1626,16 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): ): warnings.warn( "It does not make sense to provide both " - '"scale_factor_at_projection_origin" and "standard_parallel".' + '"scale_factor_at_projection_origin" and "standard_parallel".', + category=iris.exceptions.IrisCfInvalidCoordParamWarning, ) is_valid = False if scale_factor_at_projection_origin is None and standard_parallel is None: warnings.warn( 'One of "scale_factor_at_projection_origin" and ' - '"standard_parallel" is required.' 
+ '"standard_parallel" is required.', + category=iris.exceptions.IrisCfInvalidCoordParamWarning, ) is_valid = False diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 2ed01846bd..f412955adb 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -23,6 +23,7 @@ import numpy as np import numpy.ma as ma +import iris.exceptions from iris.fileformats.netcdf import _thread_safe_nc import iris.util @@ -280,7 +281,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF ancillary data variable %r, referenced by netCDF variable %r" - warnings.warn(message % (name, nc_var_name)) + warnings.warn( + message % (name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, + ) else: result[name] = CFAncillaryDataVariable( name, variables[name] @@ -323,7 +327,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF auxiliary coordinate variable %r, referenced by netCDF variable %r" - warnings.warn(message % (name, nc_var_name)) + warnings.warn( + message % (name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, + ) else: # Restrict to non-string type i.e. not a CFLabelVariable. 
if not _is_str_dtype(variables[name]): @@ -369,7 +376,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF boundary variable %r, referenced by netCDF variable %r" - warnings.warn(message % (name, nc_var_name)) + warnings.warn( + message % (name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, + ) else: result[name] = CFBoundaryVariable( name, variables[name] @@ -441,7 +451,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF climatology variable %r, referenced by netCDF variable %r" - warnings.warn(message % (name, nc_var_name)) + warnings.warn( + message % (name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, + ) else: result[name] = CFClimatologyVariable( name, variables[name] @@ -582,7 +595,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if warn: message = "Missing CF-netCDF formula term variable %r, referenced by netCDF variable %r" warnings.warn( - message % (variable_name, nc_var_name) + message % (variable_name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, ) else: if variable_name not in result: @@ -646,7 +660,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF grid mapping variable %r, referenced by netCDF variable %r" - warnings.warn(message % (name, nc_var_name)) + warnings.warn( + message % (name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, + ) else: result[name] = CFGridMappingVariable( name, variables[name] @@ -685,7 +702,10 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF label variable %r, referenced by netCDF variable %r" - warnings.warn(message % (name, nc_var_name)) + warnings.warn( + message % (name, 
nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, + ) else: # Register variable, but only allow string type. var = variables[name] @@ -857,7 +877,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if warn: message = "Missing CF-netCDF measure variable %r, referenced by netCDF variable %r" warnings.warn( - message % (variable_name, nc_var_name) + message % (variable_name, nc_var_name), + category=iris.exceptions.IrisCfMissingVarWarning, ) else: result[variable_name] = CFMeasureVariable( @@ -1069,7 +1090,8 @@ def __init__(self, file_source, warn=False, monotonic=False): ]: warnings.warn( "Optimise CF-netCDF loading by converting data from NetCDF3 " - 'to NetCDF4 file format using the "nccopy" command.' + 'to NetCDF4 file format using the "nccopy" command.', + category=iris.exceptions.IrisLoadWarning, ) self._check_monotonic = monotonic @@ -1210,7 +1232,10 @@ def _build(cf_variable): cf_variable.dimensions, ) ) - warnings.warn(msg) + warnings.warn( + msg, + category=iris.exceptions.IrisCfNonSpanningVarWarning, + ) # Build CF data variable relationships. if isinstance(cf_variable, CFDataVariable): @@ -1261,7 +1286,10 @@ def _build(cf_variable): cf_variable.dimensions, ) ) - warnings.warn(msg) + warnings.warn( + msg, + category=iris.exceptions.IrisCfNonSpanningVarWarning, + ) # Add the CF group to the variable. 
cf_variable.cf_group = cf_group diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 0189a8806f..cb8867b6ea 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -17,7 +17,7 @@ import iris.coord_systems from iris.coords import AuxCoord, CellMethod, DimCoord import iris.cube -from iris.exceptions import TranslationError +from iris.exceptions import IrisLoadWarning, TranslationError import iris.util EARTH_RADIUS = 6371229.0 @@ -273,7 +273,9 @@ def _parse_units(units): try: units = cf_units.Unit(units) except ValueError: - warnings.warn("Unknown units: {!r}".format(units)) + warnings.warn( + "Unknown units: {!r}".format(units), category=IrisLoadWarning + ) units = cf_units.Unit(None) return units @@ -611,7 +613,9 @@ def _build_cell_methods(av_or_ints, coord): else: cell_method = None msg = "Unknown {} statistic: {!r}. Unable to create cell method." - warnings.warn(msg.format(coord, av_or_int)) + warnings.warn( + msg.format(coord, av_or_int), category=IrisLoadWarning + ) cell_methods.append(cell_method) # NOTE: this can be a None return cell_methods diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 20d255ea44..29202af89e 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -50,6 +50,15 @@ NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy +class _WarnComboIgnoringBoundsLoad( + iris.exceptions.IrisIgnoringBoundsWarning, + iris.exceptions.IrisLoadWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + def _actions_engine(): # Return an 'actions engine', which provides a pyke-rules-like interface to # the core cf translation code. 
@@ -352,7 +361,8 @@ def coord_from_term(term): return coord warnings.warn( "Unable to find coordinate for variable " - "{!r}".format(name) + "{!r}".format(name), + category=iris.exceptions.IrisFactoryCoordNotFoundWarning, ) if formula_type == "atmosphere_sigma_coordinate": @@ -393,7 +403,10 @@ def coord_from_term(term): coord_p0.name() ) ) - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboIgnoringBoundsLoad, + ) coord_a = coord_from_term("a") if coord_a is not None: if coord_a.units.is_unknown(): @@ -584,7 +597,10 @@ def load_cubes(file_sources, callback=None, constraints=None): try: _load_aux_factory(engine, cube) except ValueError as e: - warnings.warn("{}".format(e)) + warnings.warn( + "{}".format(e), + category=iris.exceptions.IrisLoadWarning, + ) # Perform any user registered callback function. cube = run_callback(callback, cube, cf_var, file_source) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index c0cfd3d10b..1ff69df1f7 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -157,6 +157,15 @@ } +class _WarnComboMaskSave( + iris.exceptions.IrisMaskValueMatchWarning, + iris.exceptions.IrisSaveWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + class CFNameCoordMap: """Provide a simple CF name to CF coordinate mapping.""" @@ -308,7 +317,12 @@ def _data_fillvalue_check(arraylib, data, check_value): return is_masked, contains_value -class SaverFillValueWarning(UserWarning): +class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): + """ + Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`. + """ + + # TODO: remove at the next major release. 
pass @@ -359,7 +373,10 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): ) if warn and result is not None: - warnings.warn(result) + warnings.warn( + result, + category=_WarnComboMaskSave, + ) return result @@ -733,7 +750,7 @@ def write( msg = "cf_profile is available but no {} defined.".format( "cf_patch" ) - warnings.warn(msg) + warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) @staticmethod def check_attribute_compliance(container, data_dtype): @@ -1144,7 +1161,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): "Unable to determine formula terms " "for AuxFactory: {!r}".format(factory) ) - warnings.warn(msg) + warnings.warn(msg, category=iris.exceptions.IrisSaveWarning) else: # Override `standard_name`, `long_name`, and `axis` of the # primary coord that signals the presence of a dimensionless @@ -2126,7 +2143,10 @@ def add_ellipsoid(ellipsoid): # osgb (a specific tmerc) elif isinstance(cs, iris.coord_systems.OSGB): - warnings.warn("OSGB coordinate system not yet handled") + warnings.warn( + "OSGB coordinate system not yet handled", + category=iris.exceptions.IrisSaveWarning, + ) # lambert azimuthal equal area elif isinstance( @@ -2195,7 +2215,8 @@ def add_ellipsoid(ellipsoid): warnings.warn( "Unable to represent the horizontal " "coordinate system. The coordinate system " - "type %r is not yet implemented." % type(cs) + "type %r is not yet implemented." % type(cs), + category=iris.exceptions.IrisSaveWarning, ) self._coord_systems.append(cs) @@ -2359,7 +2380,7 @@ def set_packing_ncattrs(cfvar): "attribute, but {attr_name!r} should only be a CF " "global attribute.".format(attr_name=attr_name) ) - warnings.warn(msg) + warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) _setncattr(cf_var, attr_name, value) @@ -2593,7 +2614,9 @@ def complete(self, issue_warnings=True) -> List[Warning]: if issue_warnings: # Issue any delayed warnings from the compute. 
for delayed_warning in result_warnings: - warnings.warn(delayed_warning) + warnings.warn( + delayed_warning, category=iris.exceptions.IrisSaveWarning + ) return result_warnings @@ -2911,7 +2934,7 @@ def is_valid_packspec(p): msg = "cf_profile is available but no {} defined.".format( "cf_patch_conventions" ) - warnings.warn(msg) + warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) # Add conventions attribute. sman.update_global_attributes(Conventions=conventions) diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index fd1ccb0e95..17db0644ee 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -16,7 +16,11 @@ import iris import iris.coord_systems from iris.coords import DimCoord -from iris.exceptions import CoordinateNotFoundError, TranslationError +from iris.exceptions import ( + CoordinateNotFoundError, + IrisNimrodTranslationWarning, + TranslationError, +) __all__ = ["run"] @@ -28,7 +32,12 @@ ) -class TranslationWarning(Warning): +class TranslationWarning(IrisNimrodTranslationWarning): + """ + Backwards compatible form of :class:`iris.exceptions.IrisNimrodTranslationWarning`. + """ + + # TODO: remove at the next major release. pass @@ -181,7 +190,8 @@ def units(cube, field): warnings.warn( "Unhandled units '{0}' recorded in cube attributes.".format( field_units - ) + ), + category=IrisNimrodTranslationWarning, ) cube.attributes["invalid_units"] = field_units @@ -417,7 +427,8 @@ def coord_system(field, handle_metadata_errors): if any([is_missing(field, v) for v in crs_args]): warnings.warn( "Coordinate Reference System is not completely defined. " - "Plotting and reprojection may be impaired." 
+ "Plotting and reprojection may be impaired.", + category=IrisNimrodTranslationWarning, ) coord_sys = iris.coord_systems.TransverseMercator( *crs_args, iris.coord_systems.GeogCS(**ellipsoid) @@ -539,7 +550,7 @@ def vertical_coord(cube, field): f"{field.vertical_coord_type} != {field.reference_vertical_coord_type}. " f"Assuming {field.vertical_coord_type}" ) - warnings.warn(msg) + warnings.warn(msg, category=IrisNimrodTranslationWarning) coord_point = field.vertical_coord if coord_point == 8888.0: @@ -586,7 +597,7 @@ def vertical_coord(cube, field): warnings.warn( "Vertical coord {!r} not yet handled" "".format(field.vertical_coord_type), - TranslationWarning, + category=TranslationWarning, ) @@ -831,7 +842,8 @@ def probability_coord(cube, field, handle_metadata_errors): ) warnings.warn( f"No default units for {coord_name} coord of {cube.name()}. " - "Meta-data may be incomplete." + "Meta-data may be incomplete.", + category=IrisNimrodTranslationWarning, ) new_coord = iris.coords.AuxCoord( np.array(coord_val, dtype=np.float32), bounds=bounds, **coord_keys diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 65e0e16d72..e19ba3adff 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -27,6 +27,7 @@ from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data import iris.config import iris.coord_systems +import iris.exceptions # NOTE: this is for backwards-compatitibility *ONLY* # We could simply remove it for v2.0 ? 
@@ -220,6 +221,33 @@ } +class _WarnComboLoadingMask( + iris.exceptions.IrisLoadWarning, + iris.exceptions.IrisMaskValueMatchWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboLoadingDefaulting( + iris.exceptions.IrisDefaultingWarning, + iris.exceptions.IrisLoadWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + +class _WarnComboIgnoringLoad( + iris.exceptions.IrisIgnoringWarning, + iris.exceptions.IrisLoadWarning, +): + """One-off combination of warning classes - enhances user filtering.""" + + pass + + class STASH(collections.namedtuple("STASH", "model section item")): """ A class to hold a single STASH code. @@ -1165,7 +1193,10 @@ def save(self, file_handle): "missing data. To save these as normal values, please " "set the field BMDI not equal to any valid data points." ) - warnings.warn(msg.format(mdi)) + warnings.warn( + msg.format(mdi), + category=_WarnComboLoadingMask, + ) if isinstance(data, ma.MaskedArray): if ma.is_masked(data): data = data.filled(fill_value=mdi) @@ -1290,7 +1321,8 @@ def save(self, file_handle): warnings.warn( "Downcasting array precision from float64 to float32" " for save.If float64 precision is required then" - " please save in a different format" + " please save in a different format", + category=_WarnComboLoadingDefaulting, ) data = data.astype(">f4") lb[self.HEADER_DICT["lbuser"][0]] = 1 @@ -1732,7 +1764,8 @@ def _interpret_fields(fields): warnings.warn( "Landmask compressed fields existed without a " "landmask to decompress with. The data will have " - "a shape of (0, 0) and will not read." + "a shape of (0, 0) and will not read.", + category=iris.exceptions.IrisLoadWarning, ) mask_shape = (0, 0) else: @@ -1901,7 +1934,10 @@ def _field_gen(filename, read_data_bytes, little_ended=False): "Unable to interpret field {}. {}. 
Skipping " "the remainder of the file.".format(field_count, str(e)) ) - warnings.warn(msg) + warnings.warn( + msg, + category=_WarnComboIgnoringLoad, + ) break # Skip the trailing 4-byte word containing the header length @@ -1921,7 +1957,8 @@ def _field_gen(filename, read_data_bytes, little_ended=False): warnings.warn( wmsg.format( pp_field.lblrec * PP_WORD_DEPTH, len_of_data_plus_extra - ) + ), + category=_WarnComboIgnoringLoad, ) break diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 0369fc9fd0..998255ff2b 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -10,6 +10,7 @@ import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory +from iris.exceptions import IrisPpClimModifiedWarning from iris.fileformats._ff_cross_references import STASH_TRANS from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP from iris.fileformats.rules import ( @@ -890,4 +891,4 @@ def verify(cube, field): def _conditional_warning(condition, warning): if condition: - warnings.warn(warning) + warnings.warn(warning, category=IrisPpClimModifiedWarning) diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 707fd58757..d5a4b9c823 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -47,7 +47,8 @@ def as_cube(self): src_cubes = src_cubes.merge(unique=False) if len(src_cubes) > 1: warnings.warn( - "Multiple reference cubes for {}".format(self.name) + "Multiple reference cubes for {}".format(self.name), + category=iris.exceptions.IrisUserWarning, ) src_cube = src_cubes[-1] @@ -329,7 +330,7 @@ def _make_cube(field, converter): cube.units = metadata.units except ValueError: msg = "Ignoring PP invalid units {!r}".format(metadata.units) - warnings.warn(msg) + warnings.warn(msg, category=iris.exceptions.IrisIgnoringWarning) cube.attributes["invalid_units"] = metadata.units cube.units = cf_units._UNKNOWN_UNIT_STRING @@ -350,7 
+351,10 @@ def _resolve_factory_references( except _ReferenceError as e: msg = "Unable to create instance of {factory}. " + str(e) factory_name = factory.factory_class.__name__ - warnings.warn(msg.format(factory=factory_name)) + warnings.warn( + msg.format(factory=factory_name), + category=iris.exceptions.IrisUserWarning, + ) else: aux_factory = factory.factory_class(*args) cube.add_aux_factory(aux_factory) diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index cf16c9cbe6..cc82433e85 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -14,6 +14,8 @@ import numpy as np +from iris.exceptions import IrisUserWarning + __all__ = ["izip"] @@ -164,7 +166,8 @@ def izip(*cubes, **kwargs): warnings.warn( "Iterating over coordinate '%s' in step whose " "definitions match but whose values " - "differ." % coord_a.name() + "differ." % coord_a.name(), + category=IrisUserWarning, ) return _ZipSlicesIterator( diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 4c06530627..cb26b638e4 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -29,6 +29,7 @@ from iris._deprecation import warn_deprecated from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube, CubeList +from iris.exceptions import IrisIgnoringWarning def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): @@ -446,7 +447,7 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): if columns_ignored: ignored_args = ", ".join([t[2] for t in class_arg_mapping]) message = f"The input pandas_structure is a Series; ignoring arguments: {ignored_args} ." - warnings.warn(message) + warnings.warn(message, category=IrisIgnoringWarning) class_arg_mapping = [] non_data_names = [] @@ -896,7 +897,7 @@ def merge_metadata(meta_var_list): "'iris.FUTURE.pandas_ndim = True'. More info is in the " "documentation." ) - warnings.warn(message, FutureWarning) + warnings.warn(message, category=FutureWarning) # The legacy behaviour. 
data = cube.data diff --git a/lib/iris/plot.py b/lib/iris/plot.py index ebcb5c3bcb..28b458f715 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -34,7 +34,7 @@ import iris.coord_systems import iris.coords import iris.cube -from iris.exceptions import IrisError +from iris.exceptions import IrisError, IrisUnsupportedPlottingWarning # Importing iris.palette to register the brewer palettes. import iris.palette @@ -2023,7 +2023,7 @@ def update_animation_iris(i, cubes, vmin, vmax, coords): "use: {}." ) msg = msg.format(plot_func.__module__, supported) - warnings.warn(msg, UserWarning) + warnings.warn(msg, category=IrisUnsupportedPlottingWarning) supported = ["contour", "contourf", "pcolor", "pcolormesh"] if plot_func.__name__ not in supported: @@ -2032,7 +2032,7 @@ def update_animation_iris(i, cubes, vmin, vmax, coords): "use: {}." ) msg = msg.format(plot_func.__name__, supported) - warnings.warn(msg, UserWarning) + warnings.warn(msg, category=IrisUnsupportedPlottingWarning) # Determine plot range. vmin = kwargs.pop("vmin", min([cc.data.min() for cc in cubes])) diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 62e72f4e0e..4af7f4726d 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -28,6 +28,7 @@ from matplotlib.testing.exceptions import ImageComparisonFailure # noqa import matplotlib.widgets as mwidget # noqa +from iris.exceptions import IrisIgnoringWarning # noqa import iris.tests # noqa import iris.tests.graphics as graphics # noqa @@ -151,7 +152,7 @@ def step_over_diffs(result_dir, display=True): distance = graphics.get_phash(reference_image_path) - phash except FileNotFoundError: wmsg = "Ignoring unregistered test result {!r}." 
- warnings.warn(wmsg.format(test_key)) + warnings.warn(wmsg.format(test_key), category=IrisIgnoringWarning) continue processed = True diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index b0b60ee506..d94e85d2f5 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -19,6 +19,7 @@ import pytest from iris import Constraint, load +from iris.exceptions import IrisCfWarning from iris.experimental.ugrid.load import ( PARSE_UGRID_ON_LOAD, load_mesh, @@ -170,7 +171,7 @@ def create_synthetic_file(self, **create_kwargs): def test_mesh_bad_topology_dimension(self): # Check that the load generates a suitable warning. warn_regex = r"topology_dimension.* ignoring" - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns(IrisCfWarning, match=warn_regex): template = "minimal_bad_topology_dim" dim_line = "mesh_var:topology_dimension = 1 ;" # which is wrong ! cube = self.create_synthetic_test_cube( @@ -183,7 +184,7 @@ def test_mesh_bad_topology_dimension(self): def test_mesh_no_topology_dimension(self): # Check that the load generates a suitable warning. warn_regex = r"Mesh variable.* has no 'topology_dimension'" - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns(IrisCfWarning, match=warn_regex): template = "minimal_bad_topology_dim" dim_line = "" # don't create ANY topology_dimension property cube = self.create_synthetic_test_cube( @@ -196,7 +197,7 @@ def test_mesh_no_topology_dimension(self): def test_mesh_bad_cf_role(self): # Check that the load generates a suitable warning. 
warn_regex = r"inappropriate cf_role" - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns(IrisCfWarning, match=warn_regex): template = "minimal_bad_mesh_cf_role" dim_line = 'mesh_var:cf_role = "foo" ;' _ = self.create_synthetic_test_cube( @@ -206,7 +207,7 @@ def test_mesh_bad_cf_role(self): def test_mesh_no_cf_role(self): # Check that the load generates a suitable warning. warn_regex = r"no cf_role attribute" - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns(IrisCfWarning, match=warn_regex): template = "minimal_bad_mesh_cf_role" dim_line = "" _ = self.create_synthetic_test_cube( diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index 616feb3b0e..09f6235aab 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -17,8 +17,8 @@ import pytest import iris +from iris.exceptions import IrisSaverFillValueWarning from iris.fileformats.netcdf._thread_safe_nc import default_fillvals -from iris.fileformats.netcdf.saver import SaverFillValueWarning import iris.tests from iris.tests.stock import realistic_4d @@ -311,7 +311,7 @@ def test_fill_warnings(self, warning_type, output_path, save_is_delayed): result_warnings = [ log.message for log in logged_warnings - if isinstance(log.message, SaverFillValueWarning) + if isinstance(log.message, IrisSaverFillValueWarning) ] if save_is_delayed: @@ -320,7 +320,9 @@ def test_fill_warnings(self, warning_type, output_path, save_is_delayed): # Complete the operation now with warnings.catch_warnings(): # NOTE: warnings should *not* be issued here, instead they are returned. - warnings.simplefilter("error", category=SaverFillValueWarning) + warnings.simplefilter( + "error", category=IrisSaverFillValueWarning + ) result_warnings = result.compute() # Either way, we should now have 2 similar warnings. 
diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index dc0c29455f..6214f09e7e 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -25,7 +25,7 @@ from iris.coords import CellMethod from iris.cube import Cube, CubeList import iris.exceptions -from iris.fileformats.netcdf import Saver, UnknownCellMethodWarning +from iris.fileformats.netcdf import Saver # Get the netCDF4 module, but in a sneaky way that avoids triggering the "do not import # netCDF4" check in "iris.tests.test_coding_standards.test_netcdf4_import()". @@ -141,7 +141,9 @@ def test_unknown_method(self): warning_messages = [ warn for warn in warning_messages - if isinstance(warn, UnknownCellMethodWarning) + if isinstance( + warn, iris.exceptions.IrisUnknownCellMethodWarning + ) ] self.assertEqual(len(warning_messages), 1) message = warning_messages[0].args[0] diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py index 3395296e11..554fabb4fc 100644 --- a/lib/iris/tests/integration/netcdf/test_self_referencing.py +++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py @@ -16,6 +16,7 @@ import numpy as np import iris +from iris.exceptions import IrisCfMissingVarWarning from iris.fileformats.netcdf import _thread_safe_nc @@ -46,7 +47,9 @@ def test_cmip6_volcello_load_issue_3367(self): with mock.patch("warnings.warn") as warn: # ensure file loads without failure cube = iris.load_cube(self.fname) - warn.assert_has_calls([mock.call(expected_msg)]) + warn.assert_has_calls( + [mock.call(expected_msg, category=IrisCfMissingVarWarning)] + ) # extra check to ensure correct variable was found assert cube.standard_name == "ocean_volume" @@ -113,7 +116,9 @@ def test_self_referencing_load_issue_3367(self): with mock.patch("warnings.warn") as warn: # ensure file loads without failure cube = 
iris.load_cube(self.temp_dir_path) - warn.assert_called_with(expected_msg) + warn.assert_called_with( + expected_msg, category=IrisCfMissingVarWarning + ) # extra check to ensure correct variable was found assert cube.standard_name == "ocean_volume" diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index e654694aa7..026bdae58a 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -18,7 +18,7 @@ from iris.aux_factory import HybridHeightFactory, HybridPressureFactory from iris.coords import AuxCoord, CellMethod, DimCoord from iris.cube import Cube -from iris.exceptions import IgnoreCubeException +from iris.exceptions import IgnoreCubeException, IrisUserWarning import iris.fileformats.pp from iris.fileformats.pp import load_pairs_from_fields import iris.fileformats.pp_load_rules @@ -290,7 +290,7 @@ def test_hybrid_pressure_with_duplicate_references(self): "iris.fileformats.pp.load", new=load ) as load, mock.patch("warnings.warn") as warn: _, _, _ = iris.fileformats.pp.load_cubes("DUMMY") - warn.assert_called_with(msg) + warn.assert_called_with(msg, category=IrisUserWarning) def test_hybrid_height_with_non_standard_coords(self): # Check the save rules are using the AuxFactory to find the @@ -415,7 +415,7 @@ def test_hybrid_height_round_trip_no_reference(self): "Unable to create instance of HybridHeightFactory. " "The source data contains no field(s) for 'orography'." ) - warn.assert_called_with(msg) + warn.assert_called_with(msg, category=IrisUserWarning) # Check the data cube is set up to use hybrid height. 
self._test_coord( diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 6cea9dc001..54309e3906 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -8,6 +8,7 @@ # importing anything else import iris.tests as tests # isort:skip +import ast from datetime import datetime from fnmatch import fnmatch from glob import glob @@ -133,6 +134,66 @@ def test_python_versions(): assert search in path.read_text() +def test_categorised_warnings(): + """ + To ensure that all UserWarnings raised by Iris are categorised, for ease of use. + + No obvious category? Use the parent: + :class:`iris.exceptions.IrisUserWarning`. + + Warning matches multiple categories? Create a one-off combo class. For + example: + + .. code-block:: python + + class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): + \""" + One-off combination of warning classes - enhances user filtering. + \""" + pass + + """ + warns_without_category = [] + warns_with_user_warning = [] + tmp_list = [] + + for file_path in Path(IRIS_DIR).rglob("*.py"): + file_text = file_path.read_text() + parsed = ast.parse(source=file_text) + calls = filter(lambda node: hasattr(node, "func"), ast.walk(parsed)) + warn_calls = filter( + lambda c: getattr(c.func, "attr", None) == "warn", calls + ) + + warn_call: ast.Call + for warn_call in warn_calls: + warn_ref = f"{file_path}:{warn_call.lineno}" + tmp_list.append(warn_ref) + + category_kwargs = filter( + lambda k: k.arg == "category", warn_call.keywords + ) + category_kwarg: ast.keyword = next(category_kwargs, None) + + if category_kwarg is None: + warns_without_category.append(warn_ref) + # Work with Attribute or Name instances. + elif ( + getattr(category_kwarg.value, "attr", None) + or getattr(category_kwarg.value, "id", None) + ) == "UserWarning": + warns_with_user_warning.append(warn_ref) + + # This avoids UserWarnings being raised by unwritten default behaviour. 
+ assert ( + warns_without_category == [] + ), "All warnings raised by Iris must be raised with the category kwarg." + + assert ( + warns_with_user_warning == [] + ), "No warnings raised by Iris can be the base UserWarning class." + + class TestLicenseHeaders(tests.IrisTest): @staticmethod def whatchanged_parse(whatchanged_output): diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index 9287a79fda..ec92838466 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -20,6 +20,7 @@ from iris.aux_factory import HybridHeightFactory from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.cube +from iris.exceptions import IrisUserWarning import iris.tests.stock as stock @@ -340,7 +341,8 @@ def test_points_overlap_increasing(self): cubes.append(_make_cube((0, 2), y, 1)) cubes.append(_make_cube((1, 3), y, 2)) with pytest.warns( - UserWarning, match="Found cubes with overlap on concatenate axis" + IrisUserWarning, + match="Found cubes with overlap on concatenate axis", ): result = concatenate(cubes) self.assertEqual(len(result), 2) @@ -351,7 +353,8 @@ def test_points_overlap_decreasing(self): cubes.append(_make_cube(x, (3, 0, -1), 1)) cubes.append(_make_cube(x, (1, -1, -1), 2)) with pytest.warns( - UserWarning, match="Found cubes with overlap on concatenate axis" + IrisUserWarning, + match="Found cubes with overlap on concatenate axis", ): result = concatenate(cubes) self.assertEqual(len(result), 2) @@ -366,7 +369,8 @@ def test_bounds_overlap_increasing(self): ) cubes.append(cube) with pytest.warns( - UserWarning, match="Found cubes with overlap on concatenate axis" + IrisUserWarning, + match="Found cubes with overlap on concatenate axis", ): result = concatenate(cubes) self.assertEqual(len(result), 2) @@ -381,7 +385,8 @@ def test_bounds_overlap_decreasing(self): ) cubes.append(cube) with pytest.warns( - UserWarning, match="Found cubes with overlap on concatenate axis" + 
IrisUserWarning, + match="Found cubes with overlap on concatenate axis", ): result = concatenate(cubes) self.assertEqual(len(result), 2) diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 7cd15297cc..2e5aef249c 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -18,6 +18,7 @@ ) import iris.coords import iris.cube +from iris.exceptions import IrisUserWarning import iris.tests.stock @@ -341,7 +342,7 @@ def test_inverse_flattening_change(self): cs = GeogCS(6543210, 6500000) initial_crs = cs.as_cartopy_crs() with self.assertWarnsRegex( - UserWarning, + IrisUserWarning, "Setting inverse_flattening does not affect other properties of the GeogCS object.", ): cs.inverse_flattening = cs.inverse_flattening + 1 diff --git a/lib/iris/tests/test_hybrid.py b/lib/iris/tests/test_hybrid.py index 76fc971a08..b070f36a7a 100644 --- a/lib/iris/tests/test_hybrid.py +++ b/lib/iris/tests/test_hybrid.py @@ -18,6 +18,7 @@ import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory +from iris.exceptions import IrisIgnoringBoundsWarning import iris.tests.stock @@ -136,7 +137,7 @@ def test_invalid_dependencies(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaises(UserWarning): + with self.assertRaises(IrisIgnoringBoundsWarning): _ = HybridHeightFactory(orography=sigma) def test_bounded_orography(self): @@ -154,7 +155,7 @@ def test_bounded_orography(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaisesRegex(UserWarning, msg): + with self.assertRaisesRegex(IrisIgnoringBoundsWarning, msg): self.cube.coord("altitude") @@ -215,7 +216,7 @@ def test_invalid_dependencies(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaises(UserWarning): + with 
self.assertRaises(IrisIgnoringBoundsWarning): _ = HybridPressureFactory( sigma=sigma, surface_air_pressure=sigma ) @@ -235,7 +236,7 @@ def test_bounded_surface_pressure(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaisesRegex(UserWarning, msg): + with self.assertRaisesRegex(IrisIgnoringBoundsWarning, msg): self.cube.coord("air_pressure") diff --git a/lib/iris/tests/test_iterate.py b/lib/iris/tests/test_iterate.py index ec86d2f69d..6317ef32b5 100644 --- a/lib/iris/tests/test_iterate.py +++ b/lib/iris/tests/test_iterate.py @@ -22,6 +22,7 @@ import iris import iris.analysis +from iris.exceptions import IrisUserWarning import iris.iterate import iris.tests.stock @@ -365,12 +366,12 @@ def test_izip_different_valued_coords(self): warnings.simplefilter( "error" ) # Cause all warnings to raise Exceptions - with self.assertRaises(UserWarning): + with self.assertRaises(IrisUserWarning): iris.iterate.izip( self.cube_a, self.cube_b, coords=self.coord_names ) # Call with coordinates, rather than names - with self.assertRaises(UserWarning): + with self.assertRaises(IrisUserWarning): iris.iterate.izip( self.cube_a, self.cube_b, coords=[latitude, longitude] ) diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 6438140ed9..2e389942bf 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -26,6 +26,7 @@ from iris._lazy_data import is_lazy_data import iris.analysis.trajectory import iris.coord_systems as icoord_systems +from iris.exceptions import IrisCfSaveWarning from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf from iris.fileformats.netcdf import _thread_safe_nc @@ -1099,7 +1100,9 @@ def test_conflicting_global_attributes(self): with self.temp_filename(suffix=".nc") as filename: with mock.patch("warnings.warn") as warn: iris.save([self.cube, self.cube2], filename) - 
warn.assert_called_with(expected_msg) + warn.assert_called_with( + expected_msg, category=IrisCfSaveWarning + ) self.assertCDL( filename, ("netcdf", "netcdf_save_confl_global_attr.cdl") ) diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index 8649cc55ea..c00830aacc 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -16,6 +16,7 @@ import iris.coord_systems import iris.coords import iris.cube +from iris.exceptions import IrisDefaultingWarning import iris.tests import iris.tests.stock @@ -161,7 +162,8 @@ def test_no_coord_system(self): warn.assert_called_once_with( "Coordinate system of latitude and " "longitude coordinates is not specified. " - "Assuming WGS84 Geodetic." + "Assuming WGS84 Geodetic.", + category=IrisDefaultingWarning, ) diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index 49e03a1174..62ab1ae283 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -21,6 +21,7 @@ from iris.analysis.geometry import geometry_area_weights from iris.coords import DimCoord from iris.cube import Cube +from iris.exceptions import IrisGeometryExceedWarning import iris.tests.stock as stock @@ -148,7 +149,9 @@ def test_distinct_xy_bounds_pole(self): "The geometry exceeds the " "cube's y dimension at the upper end.", ) - self.assertTrue(issubclass(w[-1].category, UserWarning)) + self.assertTrue( + issubclass(w[-1].category, IrisGeometryExceedWarning) + ) target = np.array( [ [0, top_cell_half, top_cell_half, 0], diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 69b6b70c96..c548d017f2 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ 
b/lib/iris/tests/unit/coords/test_Coord.py @@ -19,7 +19,7 @@ import iris from iris.coords import AuxCoord, Coord, DimCoord from iris.cube import Cube -from iris.exceptions import UnitConversionError +from iris.exceptions import IrisVagueMetadataWarning, UnitConversionError from iris.tests.unit.coords import CoordTestMixin Pair = collections.namedtuple("Pair", "points bounds") @@ -482,7 +482,7 @@ def test_numeric_nd_multidim_bounds_warning(self): "Collapsing a multi-dimensional coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): coord.collapsed() def test_lazy_nd_multidim_bounds_warning(self): @@ -493,7 +493,7 @@ def test_lazy_nd_multidim_bounds_warning(self): "Collapsing a multi-dimensional coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): coord.collapsed() def test_numeric_nd_noncontiguous_bounds_warning(self): @@ -504,7 +504,7 @@ def test_numeric_nd_noncontiguous_bounds_warning(self): "Collapsing a non-contiguous coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): coord.collapsed() def test_lazy_nd_noncontiguous_bounds_warning(self): @@ -515,7 +515,7 @@ def test_lazy_nd_noncontiguous_bounds_warning(self): "Collapsing a non-contiguous coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): coord.collapsed() def test_numeric_3_bounds(self): @@ -530,7 +530,7 @@ def test_numeric_3_bounds(self): r"1D coordinates with 2 bounds. Metadata may not be fully " r"descriptive for 'x'. Ignoring bounds." 
) - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): collapsed_coord = coord.collapsed() self.assertFalse(collapsed_coord.has_lazy_points()) @@ -553,7 +553,7 @@ def test_lazy_3_bounds(self): r"1D coordinates with 2 bounds. Metadata may not be fully " r"descriptive for 'x'. Ignoring bounds." ) - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): collapsed_coord = coord.collapsed() self.assertTrue(collapsed_coord.has_lazy_points()) diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index a733665df8..443c9db546 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -40,6 +40,8 @@ AncillaryVariableNotFoundError, CellMeasureNotFoundError, CoordinateNotFoundError, + IrisUserWarning, + IrisVagueMetadataWarning, UnitConversionError, ) import iris.tests.stock as stock @@ -676,7 +678,10 @@ def _assert_warn_collapse_without_weight(self, coords, warn): # Ensure that warning is raised. msg = "Collapsing spatial coordinate {!r} without weighting" for coord in coords: - self.assertIn(mock.call(msg.format(coord)), warn.call_args_list) + self.assertIn( + mock.call(msg.format(coord), category=IrisUserWarning), + warn.call_args_list, + ) def _assert_nowarn_collapse_without_weight(self, coords, warn): # Ensure that warning is not raised. @@ -765,7 +770,10 @@ def _assert_warn_cannot_check_contiguity(self, warn): f"bounds. Metadata may not be fully descriptive for " f"'{coord}'. Ignoring bounds." 
) - self.assertIn(mock.call(msg), warn.call_args_list) + self.assertIn( + mock.call(msg, category=IrisVagueMetadataWarning), + warn.call_args_list, + ) def _assert_cube_as_expected(self, cube): """Ensure that cube data and coordinates are as expected.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index a4e0e05a08..641b6b7b44 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -20,6 +20,7 @@ import numpy as np import pytest +import iris.exceptions from iris.experimental.ugrid.cf import CFUGridAuxiliaryCoordinateVariable from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, @@ -215,7 +216,10 @@ def test_warn(self): } def operation(warn: bool): - warnings.warn("emit at least 1 warning") + warnings.warn( + "emit at least 1 warning", + category=iris.exceptions.IrisUserWarning, + ) result = CFUGridAuxiliaryCoordinateVariable.identify( vars_all, warn=warn ) @@ -223,7 +227,9 @@ def operation(warn: bool): # Missing warning. 
warn_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns( + iris.exceptions.IrisCfMissingVarWarning, match=warn_regex + ): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -235,7 +241,9 @@ def operation(warn: bool): vars_all[subject_name] = netcdf_ugrid_variable( subject_name, "", np.bytes_ ) - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns( + iris.exceptions.IrisCfLabelVarWarning, match=warn_regex + ): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index 27d5c1db90..5a68a8c03f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -20,6 +20,7 @@ import numpy as np import pytest +import iris.exceptions from iris.experimental.ugrid.cf import CFUGridConnectivityVariable from iris.experimental.ugrid.mesh import Connectivity from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( @@ -204,7 +205,10 @@ def test_warn(self): } def operation(warn: bool): - warnings.warn("emit at least 1 warning") + warnings.warn( + "emit at least 1 warning", + category=iris.exceptions.IrisUserWarning, + ) result = CFUGridConnectivityVariable.identify(vars_all, warn=warn) self.assertDictEqual({}, result) @@ -212,7 +216,9 @@ def operation(warn: bool): warn_regex = ( rf"Missing CF-UGRID connectivity variable {subject_name}.*" ) - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns( + iris.exceptions.IrisCfMissingVarWarning, match=warn_regex + ): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -224,7 +230,9 @@ def operation(warn: bool): vars_all[subject_name] = 
netcdf_ugrid_variable( subject_name, "", np.bytes_ ) - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns( + iris.exceptions.IrisCfLabelVarWarning, match=warn_regex + ): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index 6b278cf1b1..8302c30177 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -20,6 +20,7 @@ import numpy as np import pytest +import iris.exceptions from iris.experimental.ugrid.cf import CFUGridMeshVariable from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, @@ -247,13 +248,18 @@ def test_warn(self): } def operation(warn: bool): - warnings.warn("emit at least 1 warning") + warnings.warn( + "emit at least 1 warning", + category=iris.exceptions.IrisUserWarning, + ) result = CFUGridMeshVariable.identify(vars_all, warn=warn) self.assertDictEqual({}, result) # Missing warning. 
warn_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns( + iris.exceptions.IrisCfMissingVarWarning, match=warn_regex + ): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -265,7 +271,9 @@ def operation(warn: bool): vars_all[subject_name] = netcdf_ugrid_variable( subject_name, "", np.bytes_ ) - with pytest.warns(UserWarning, match=warn_regex): + with pytest.warns( + iris.exceptions.IrisCfLabelVarWarning, match=warn_regex + ): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index cec4f53bc3..16943c0c15 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -15,7 +15,7 @@ import numpy as np -from iris.exceptions import NotYetImplementedError +from iris.exceptions import IrisLoadWarning, NotYetImplementedError import iris.fileformats._ff as ff from iris.fileformats._ff import FF2PP import iris.fileformats.pp as pp @@ -467,7 +467,7 @@ def test_unequal_spacing_eitherside(self): with mock.patch("warnings.warn") as warn: result = ff2pp._det_border(field_x, None) - warn.assert_called_with(msg) + warn.assert_called_with(msg, category=IrisLoadWarning) self.assertIs(result, field_x) def test_increasing_field_values(self): diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py index 6a65397086..72d522ec85 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py @@ -14,7 +14,7 @@ import numpy as np -from iris.fileformats._ff import FFHeader +from iris.fileformats._ff import FFHeader, _WarnComboLoadingDefaulting MyGrid = collections.namedtuple("MyGrid", "column row real horiz_grid_type") @@ -60,7 +60,8 @@ def test_unknown(self): grid = 
header.grid() warn.assert_called_with( "Staggered grid type: 0 not currently" - " interpreted, assuming standard C-grid" + " interpreted, assuming standard C-grid", + category=_WarnComboLoadingDefaulting, ) self.assertIs(grid, mock.sentinel.grid) diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index ded635984c..624837c19d 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -15,6 +15,7 @@ from unittest import mock import iris.coords +from iris.exceptions import IrisLoadWarning from iris.fileformats.name_loaders import _build_cell_methods @@ -104,7 +105,7 @@ def test_unrecognised(self): "Unknown {} statistic: {!r}. Unable to " "create cell method.".format(coord_name, unrecognised_heading) ) - warn.assert_called_with(expected_msg) + warn.assert_called_with(expected_msg, category=IrisLoadWarning) def test_unrecognised_similar_to_no_averaging(self): unrecognised_headings = [ @@ -129,7 +130,7 @@ def test_unrecognised_similar_to_no_averaging(self): "Unknown {} statistic: {!r}. 
Unable to " "create cell method.".format(coord_name, unrecognised_heading) ) - warn.assert_called_with(expected_msg) + warn.assert_called_with(expected_msg, category=IrisLoadWarning) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 399a987f11..38882810d2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -12,6 +12,7 @@ import tempfile import warnings +from iris.exceptions import IrisLoadWarning import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf @@ -138,7 +139,7 @@ def run_testcase(self, warning_regex=None, **testcase_kwargs): if warning_regex is None: context = self.assertNoWarningsRegexp() else: - context = self.assertWarnsRegex(UserWarning, warning_regex) + context = self.assertWarnsRegex(IrisLoadWarning, warning_regex) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index 729a2d8b14..9935a6e5ae 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -15,6 +15,7 @@ from unittest import mock from iris.coords import CellMethod +from iris.exceptions import IrisCfLoadWarning from iris.fileformats._nc_load_rules.helpers import parse_cell_methods @@ -123,7 +124,7 @@ def test_comment_bracket_mismatch_warning(self): ] for cell_method_str in cell_method_strings: with self.assertWarns( - UserWarning, + IrisCfLoadWarning, msg="Cell methods may be incorrectly parsed due to mismatched brackets", ): _ = parse_cell_methods(cell_method_str) @@ -139,7 +140,7 
@@ def test_badly_formatted_warning(self): ] for cell_method_str in cell_method_strings: with self.assertWarns( - UserWarning, + IrisCfLoadWarning, msg=f"Failed to fully parse cell method string: {cell_method_str}", ): _ = parse_cell_methods(cell_method_str) diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index 841935cc81..c15c8737fd 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -16,6 +16,7 @@ from iris.coords import DimCoord from iris.cube import Cube +from iris.exceptions import IrisFactoryCoordNotFoundWarning from iris.fileformats.netcdf.loader import _load_aux_factory @@ -165,7 +166,8 @@ def test_formula_terms_ap_missing_coords(self): with mock.patch("warnings.warn") as warn: _load_aux_factory(self.engine, self.cube) warn.assert_called_once_with( - "Unable to find coordinate for variable " "'ap'" + "Unable to find coordinate for variable " "'ap'", + category=IrisFactoryCoordNotFoundWarning, ) self._check_no_delta() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 12af318c01..af0d7bcd30 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -31,6 +31,7 @@ ) from iris.coords import AuxCoord, DimCoord from iris.cube import Cube +from iris.exceptions import IrisMaskValueMatchWarning from iris.fileformats.netcdf import Saver, _thread_safe_nc import iris.tests.stock as stock @@ -555,7 +556,7 @@ def test_contains_fill_value_passed(self): cube = self._make_cube(">f4") fill_value = 1 with self.assertWarnsRegex( - UserWarning, + IrisMaskValueMatchWarning, "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube, fill_value=fill_value): @@ 
-567,7 +568,7 @@ def test_contains_fill_value_byte(self): cube = self._make_cube(">i1") fill_value = 1 with self.assertWarnsRegex( - UserWarning, + IrisMaskValueMatchWarning, "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube, fill_value=fill_value): @@ -579,7 +580,7 @@ def test_contains_default_fill_value(self): cube = self._make_cube(">f4") cube.data[0, 0] = _thread_safe_nc.default_fillvals["f4"] with self.assertWarnsRegex( - UserWarning, + IrisMaskValueMatchWarning, "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 9686c88abf..da5f2d88fa 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -18,6 +18,7 @@ import numpy as np import pytest +from iris.exceptions import IrisMaskValueMatchWarning import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc from iris.fileformats.netcdf.saver import Saver, _FillvalueCheckInfo @@ -183,5 +184,5 @@ def test_warnings(self, compute, data_form): if n_expected_warnings > 0: warning = issued_warnings[0] msg = "contains unmasked data points equal to the fill-value, 2.0" - assert isinstance(warning, UserWarning) + assert isinstance(warning, IrisMaskValueMatchWarning) assert msg in warning.args[0] diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py index b2e4b63e3a..317f75bb8c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -11,9 +11,9 @@ import numpy as np import pytest +from iris.exceptions import IrisSaverFillValueWarning from 
iris.fileformats.netcdf._thread_safe_nc import default_fillvals from iris.fileformats.netcdf.saver import ( - SaverFillValueWarning, _fillvalue_report, _FillvalueCheckInfo, ) @@ -93,12 +93,14 @@ def test_warn(self, has_collision): expected_msg = "'' contains unmasked data points equal to the fill-value" # Enter a warnings context that checks for the error. warning_context = pytest.warns( - SaverFillValueWarning, match=expected_msg + IrisSaverFillValueWarning, match=expected_msg ) warning_context.__enter__() else: # Check that we get NO warning of the expected type. - warnings.filterwarnings("error", category=SaverFillValueWarning) + warnings.filterwarnings( + "error", category=IrisSaverFillValueWarning + ) # Do call: it should raise AND return a warning, ONLY IF there was a collision. result = _fillvalue_report( diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py index 44dcf8ac48..2279bcffc3 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py @@ -63,7 +63,7 @@ def test_unhandled(self): vertical_coord_val=1.0, vertical_coord_type=-1 ) warn.assert_called_once_with( - "Vertical coord -1 not yet handled", TranslationWarning + "Vertical coord -1 not yet handled", category=TranslationWarning ) def test_null(self): diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index 316894ded1..f2bbf97a80 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -13,6 +13,7 @@ import numpy as np +from iris.exceptions import IrisDefaultingWarning, IrisMaskValueMatchWarning import iris.fileformats.pp as pp from iris.fileformats.pp import PPField, SplittableInt @@ -91,7 +92,7 @@ def field_checksum(data): data_64 = np.linspace(0, 1, num=10, 
endpoint=False).reshape(2, 5) checksum_32 = field_checksum(data_64.astype(">f4")) msg = "Downcasting array precision from float64 to float32 for save." - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisDefaultingWarning, msg): checksum_64 = field_checksum(data_64.astype(">f8")) self.assertEqual(checksum_32, checksum_64) @@ -104,7 +105,7 @@ def test_masked_mdi_value_warning(self): [1.0, field.bmdi, 3.0], dtype=np.float32 ) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: field.save(pp_file) @@ -116,7 +117,7 @@ def test_unmasked_mdi_value_warning(self): # Make float32 data, as float64 default produces an extra warning. field.data = np.array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(UserWarning, msg): + with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: field.save(pp_file) From f05404766bb24d234d5bd7003d372e1c68468aa0 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Fri, 22 Sep 2023 12:44:40 +0100 Subject: [PATCH 054/134] Replaced pkg_resources version parser with packager version parser. (#5511) * removed pkg_resources version parse references * Corrected pull num * Fix What's New indentation * Fix What's New indentation. 
--------- Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 5 +++-- .../tests/unit/experimental/ugrid/mesh/test_Connectivity.py | 4 ++-- .../tests/unit/experimental/ugrid/mesh/test_MeshCoord.py | 4 ++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 4c732c43df..b342a51a01 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -84,8 +84,9 @@ This document explains the changes made to Iris for this release working properly. (Main pull request: :pull:`5437`, more detail: :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) -#. `@acchamber`_ removed several warnings from iris related to Numpy 1.25 deprecations. - (:pull:`5493`) +#. `@acchamber`_ and `@ESadek-MO`_ resolved several deprecation to reduce + number of warnings raised during tests. + (:pull:`5493`, :pull:`5511`) #. `@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in favour of using Python warnings, following team agreement. 
(:pull:`5488`) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index f343f4be24..7e90555801 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -14,7 +14,7 @@ import numpy as np from numpy import ma -from pkg_resources import parse_version +from packaging import version from iris._lazy_data import as_lazy_data, is_lazy_data from iris.experimental.ugrid.mesh import Connectivity @@ -63,7 +63,7 @@ def test_indices(self): def test_read_only(self): attributes = ("indices", "cf_role", "start_index", "location_axis") - if parse_version(python_version()) >= parse_version("3.11"): + if version.parse(python_version()) >= version.parse("3.11"): msg = "object has no setter" else: msg = "can't set attribute" diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index cb90c176b6..ba7306bded 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -17,7 +17,7 @@ import dask.array as da import numpy as np -from pkg_resources import parse_version +from packaging import version import pytest from iris._lazy_data import as_lazy_data, is_lazy_data @@ -79,7 +79,7 @@ def setUp(self): def test_fixed_metadata(self): # Check that you cannot set any of these on an existing MeshCoord. 
meshcoord = self.meshcoord - if parse_version(python_version()) >= parse_version("3.11"): + if version.parse(python_version()) >= version.parse("3.11"): msg = "object has no setter" else: msg = "can't set attribute" From d923f343f59a20bad1c0273d44f47716d8bd6838 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 25 Sep 2023 08:54:40 +0100 Subject: [PATCH 055/134] Docs page on filtering warnings (#5509) * Docs page on filtering warnings. * What's New entry. * Warnings doctest relative path. * Corrections to command line warning filtering. * Typo. --- .../src/further_topics/filtering_warnings.rst | 271 ++++++++++++++++++ docs/src/userguide/index.rst | 1 + docs/src/whatsnew/latest.rst | 3 +- 3 files changed, 274 insertions(+), 1 deletion(-) create mode 100644 docs/src/further_topics/filtering_warnings.rst diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst new file mode 100644 index 0000000000..689ea69a52 --- /dev/null +++ b/docs/src/further_topics/filtering_warnings.rst @@ -0,0 +1,271 @@ +.. _filtering-warnings: + +================== +Filtering Warnings +================== + +Since Iris cannot predict your specific needs, it by default raises Warnings +for anything that might be a problem for **any** user, and is designed to work with +you to ``ignore`` Warnings which you do not find helpful. + +.. testsetup:: filtering_warnings + + from pathlib import Path + import sys + import warnings + + import iris + import iris.coord_systems + import iris.exceptions + + # Hack to ensure doctests actually see Warnings that are raised, and that + # they have a relative path (so a test pass is not machine-dependent). 
+ warnings.filterwarnings("default") + IRIS_FILE = Path(iris.__file__) + def custom_warn(message, category, filename, lineno, file=None, line=None): + filepath = Path(filename) + filename = str(filepath.relative_to(IRIS_FILE.parents[1])) + sys.stdout.write(warnings.formatwarning(message, category, filename, lineno)) + warnings.showwarning = custom_warn + + geog_cs_globe = iris.coord_systems.GeogCS(6400000) + orthographic_coord_system = iris.coord_systems.Orthographic( + longitude_of_projection_origin=0, + latitude_of_projection_origin=0, + ellipsoid=geog_cs_globe, + ) + + + def my_operation(): + geog_cs_globe.inverse_flattening = 0.1 + _ = orthographic_coord_system.as_cartopy_crs() + +Here is a hypothetical operation - ``my_operation()`` - which raises two +Warnings: + +.. doctest:: filtering_warnings + + >>> my_operation() + ... + iris/coord_systems.py:454: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + iris/coord_systems.py:821: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + warnings.warn( + +Warnings can be suppressed using the Python warnings filter with the ``ignore`` +action. Detailed information is available in the Python documentation: +:external+python:mod:`warnings`. + +The key points are: + +- :ref:`When`: a warnings filter can be applied + either from the command line or from within Python. +- :ref:`What`: a warnings filter accepts + various arguments to specify which Warnings are being filtered. Both broad + and narrow filters are possible. + +.. _warning-filter-application: + +**When** a Warnings Filter can be Applied +----------------------------------------- + +- **Command line:** setting the :external+python:envvar:`PYTHONWARNINGS` + environment variable. 
+- **Command line:** the `python -W `_ + command line argument. +- **Within Python:** use :func:`warnings.filterwarnings` . + +The :ref:`warning-filter-specificity` section demonstrates using +:func:`warnings.filterwarnings`, and shows the equivalent **command line** +approaches. + + +.. _warning-filter-specificity: + +**What** Warnings will be Filtered +---------------------------------- + +.. note:: + + For all of these examples we are using the + :class:`~warnings.catch_warnings` context manager to ensure any changes to + settings are temporary. + + This should always work fine for the ``ignore`` + warning filter action, but note that some of the other actions + may not behave correctly with all Iris operations, as + :class:`~warnings.catch_warnings` is not thread-safe (e.g. using the + ``once`` action may cause 1 warning per chunk of lazy data). + +Specific Warnings +~~~~~~~~~~~~~~~~~ + +**When you do not want a specific warning, but still want all others.** + +You can target specific Warning messages, e.g. + +.. doctest:: filtering_warnings + + >>> with warnings.catch_warnings(): + ... warnings.filterwarnings("ignore", message="Discarding false_easting") + ... my_operation() + ... + iris/coord_systems.py:454: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + +:: + + python -W ignore:"Discarding false_easting" + export PYTHONWARNINGS=ignore:"Discarding false_easting" + +---- + +Or you can target Warnings raised by specific lines of specific modules, e.g. + +.. doctest:: filtering_warnings + + >>> with warnings.catch_warnings(): + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=454) + ... my_operation() + ... + iris/coord_systems.py:821: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. 
+ warnings.warn( + +:: + + python -W ignore:::iris.coord_systems:454 + export PYTHONWARNINGS=ignore:::iris.coord_systems:454 + +Warnings from a Common Source +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +**When you do not want ANY warnings raised by a module, or collection of +modules.** + +E.g. filtering the ``coord_systems`` module: + +.. doctest:: filtering_warnings + + >>> with warnings.catch_warnings(): + ... warnings.filterwarnings("ignore", module="iris.coord_systems") + ... my_operation() + +:: + + python -W ignore:::iris.coord_systems + export PYTHONWARNINGS=ignore:::iris.coord_systems + +---- + +If using :func:`warnings.filterwarnings` , you can also use partial +definitions. The below example will ``ignore`` all Warnings from ``iris`` as a +whole. + +.. doctest:: filtering_warnings + + >>> with warnings.catch_warnings(): + ... warnings.filterwarnings("ignore", module="iris") + ... my_operation() + +The above 'partial' filter is not available with the command line approaches. + +Warnings of a Common Type +~~~~~~~~~~~~~~~~~~~~~~~~~ + +**When you do not want any Warnings of the same nature, from anywhere in the +code you are calling.** + +The below example will ``ignore`` any +:class:`~iris.exceptions.IrisDefaultingWarning` that gets raised by *any* +module during execution: + +.. doctest:: filtering_warnings + + >>> with warnings.catch_warnings(): + ... warnings.filterwarnings( + ... "ignore", + ... category=iris.exceptions.IrisDefaultingWarning + ... ) + ... my_operation() + ... + iris/coord_systems.py:454: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + +---- + +Using :class:`~iris.exceptions.IrisUserWarning` in the filter will ``ignore`` +both Warnings, since :class:`~iris.exceptions.IrisDefaultingWarning` subclasses +:class:`~iris.exceptions.IrisUserWarning` : + +.. 
doctest:: filtering_warnings + + >>> with warnings.catch_warnings(): + ... warnings.filterwarnings( + ... "ignore", + ... category=iris.exceptions.IrisUserWarning + ... ) + ... my_operation() + +---- + +The command line approaches can only handle the built-in Warning +categories (`cpython#66733`_):: + + python -W ignore::UserWarning + export PYTHONWARNINGS=ignore::UserWarning + +---- + +There are several built-in Python warning categories that can be used here +(:class:`DeprecationWarning` being a popular example, see +:external+python:mod:`warnings` for more). Since Iris has +so many different warnings that might be raised, Iris subclasses +:class:`UserWarning` to :class:`~iris.exceptions.IrisUserWarning`, which itself +has **many** specialised subclasses. These subclasses exist to give you more +granularity in your warning filtering; you can see the full list by +searching the :mod:`iris.exceptions` page for ``warning`` . + +.. attention:: + + If you have ideas for adding/altering Iris' warning categories, please + :ref:`get in touch`! The categories exist to + make your life easier, and it is simple to make modifications. + + +More Detail +----------- + +Different people use Iris for very different purposes, from quick file +visualisation to extract-transform-load to statistical analysis. These +contrasting priorities mean disagreement on which Iris problems can be ignored +and which are critically important. + +For problems that prevent Iris functioning: **Concrete Exceptions** are raised, which +stop code from running any further - no debate here. For less catastrophic +problems: **Warnings** are raised, +which notify you (in ``stderr``) but allow code to continue running. The Warnings are +there because Iris may **OR may not** function in the way you expect, +depending on what you need - e.g. a problem might prevent data being saved to +NetCDF, but statistical analysis will still work fine. 
+ +Examples of Iris Warnings +~~~~~~~~~~~~~~~~~~~~~~~~~ + +- If you attempt to plot un-bounded point data as a ``pcolormesh``: Iris will + guess appropriate bounds around each point so that quadrilaterals can be + plotted. This permanently modifies the relevant coordinates, so the you are + warned in case downstream operations assume un-bounded coordinates. +- If you load a NetCDF file where a CF variable references another variable - + e.g. ``my_var:coordinates = "depth_var" ;`` - but the referenced variable + (``depth_var``) is not in the file: Iris will still construct + its data model, but without this reference relationship. You are warned since + the file includes an error and the loaded result might therefore not be as + expected. + + +.. testcleanup:: filtering_warnings + + warnings.filterwarnings("ignore") + + +.. _cpython#66733: https://github.com/python/cpython/issues/66733 diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index 771aa450a3..c87323da8e 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -42,6 +42,7 @@ they may serve as a useful reference for future exploration. :maxdepth: 2 :caption: Further Topics + ../further_topics/filtering_warnings ../further_topics/metadata ../further_topics/lenient_metadata ../further_topics/lenient_maths diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index b342a51a01..b4396ad0d5 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -72,7 +72,8 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ -#. N/A +#. `@trexfeathers`_ documented the intended use of warnings filtering with + Iris. See :ref:`filtering-warnings`. 
(:pull:`5509`) 💼 Internal From 78ec427d9069073b92addd29d53f51afd3b3b566 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Tue, 26 Sep 2023 10:29:13 +0100 Subject: [PATCH 056/134] Updated environment lockfiles (#5513) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 87 +++++++++++++------------ requirements/locks/py311-linux-64.lock | 83 ++++++++++++------------ requirements/locks/py39-linux-64.lock | 89 +++++++++++++------------- 3 files changed, 128 insertions(+), 131 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index efa5431823..ac9c50ade5 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: bc0383c4702650016b286d9f1d4b405e53a5c85772e04622e3c10fd51465463a +# input_hash: 94966cd7393527bff211c87589678b2ffe1697705267a20b2708a4cc27da5376 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,14 +9,14 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_1.conda#acfb4817400db5804030a3a7ef7909a1 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda#26322ec5d7712c3ded99dd656142b8ce https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_1.conda#8bb001683321dcbde117a7337b5aace7 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_1.conda#ff8999574b465089ba0aa25a5e865bd0 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 @@ -32,11 +32,11 @@ https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c 
-https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_1.conda#a0d27fd5c6f05aa45e9602b1db49581c +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d @@ -45,13 +45,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad 
https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -74,7 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_1.conda#394218a92951499aed2ab1bafb30b570 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e 
https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -101,7 +101,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 @@ -124,7 +124,7 @@ https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.cond https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py310hc6cd4ac_0.conda#d1157aba60e67df614438afd5cd53564 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py310hc6cd4ac_2.conda#51af196e0a8d2f253b695f9a63d53f4b https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 @@ -132,41 +132,41 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.1-pyh1a96a4e_0.conda#d69753ff6ee3c84a6638921dd95db662 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_0.conda#741385a84f6a1b6623eb39226cc669e8 
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_0.conda#5597d9f9778af6883ae64f0e7d39416c -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hd41b1e2_1.conda#39cac2febd98523afe9ed8e33c22f67d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 
-https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda#cb25177acf28cc35cfa6c1ac1c679e22 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py310h2372a71_0.conda#75e60ce53c01a121039b3050c9e1f759 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py310h2372a71_1.conda#708d39474e54ba0f1832afb551633338 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_0.conda#511120451bf728d52bb37c73d4069e57 
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_1.conda#bb010e368de4940771368bc3dc4c63e7 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e @@ -177,22 +177,22 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f55 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_0.conda#dfb49d3ac440e1a236080f9c300e642f +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h2372a71_1.conda#cd944e0f7a4d1529c6707bd65423bd64 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py310h2372a71_0.conda#0834a720fe60f511913ac52cd01e40dc +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h2fee648_5.conda#fef75d6c60d8a1cc7e6d1707f470a4e2 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py310h2372a71_1.conda#d0d56bc5723b41cc1af97e2b78c54b9b https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py310h2372a71_0.conda#1f18231ffab82f236ce074b2aaa07e54 https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 @@ -201,29 +201,29 @@ https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c7 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h74d50f4_7.conda#3453ac94a99ad9daf17e8a313d274567 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py310h582fbeb_0.conda#adcc7ea52e4d39d0a93f6a2ef36c7fd4 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h29da1c1_1.conda#8e93b1c69cddf89fd412178d3d418bae https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_0.conda#be1a7e420b7bac4ee02353d0e3161918 
+https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_1.conda#c7936ec7db24bb913671a1bc5eb2b79d https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_1.conda#483fe58e14ba244110cd1be2b771b70f +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.5-pyhd8ed1ab_0.conda#3bda70bbeb2920f44db5375af2e5fe38 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_0.conda#882a66517c52cae2719ac25308f61316 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310ha4c1d20_0.conda#1ac91334ffc1f3fd297319cd1c74b34e +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310hb13e2d6_0.conda#ac3b67e928cc71548efad9b522d42fef https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_4.conda#345beb10601d5360a15c033d68165a4f +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_1.conda#453939b9853937e80c94e9eb3da75981 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_5.conda#ef5333594a958b25912002886b82b253 
https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b @@ -232,11 +232,11 @@ https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.ta https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py310hd41b1e2_0.conda#e00d52a8657a79b0a7c8c10559784759 https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-h8e1006c_1.conda#98206c865fccdea9723f0c6f9241a24f +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_0.conda#cd758f0e1d30ada1c320be50767dd55e https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py310hcc13569_0.conda#6c92da4ec4e301d09a365c0584e632c8 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py310hcc13569_0.conda#ae976997aad218dc467d206ded2c8d8e https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310hb13e2d6_1.conda#4f522fc9cb8ecfa25e39f5c2ea65b16b @@ 
-249,7 +249,7 @@ https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.cond https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_0.conda#0ba9c5af7a6cd0244a8ae2038c89317f +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_1.conda#d6fe03e8d9e80b6e20b7ae60bf4f88b0 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_102.conda#6025039727a049ab4c0f2aab842c01cb https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d @@ -258,9 +258,9 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_4.conda#db878a0696f9a7980171fd3cf29cca22 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_0.conda#6c194758494847c927ad3bcf37fafa49 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.0-pyhd8ed1ab_0.conda#16cff214435f2a8163fbe67db9eafb96 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_5.conda#f4fe7a6e3d7c78c9de048ea9dda21690 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_1.conda#8cc0628c9703cf5c8404465e838cc5ae +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 @@ -270,4 +270,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 - diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index ff6f4ff76c..2d41a7dd3f 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: e651bbc39258b157d888d85ff878fc5c3d62cc9a632c7b8bf515b62cd8d2da53 +# input_hash: 40113e38fffa3a31ce64e60231c756c740914d9f0444edaeecd07e598851abc8 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,14 +9,14 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_1.conda#acfb4817400db5804030a3a7ef7909a1 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_1.conda#8bb001683321dcbde117a7337b5aace7 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_1.conda#ff8999574b465089ba0aa25a5e865bd0 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 @@ -32,11 +32,11 @@ https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_1.conda#a0d27fd5c6f05aa45e9602b1db49581c +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d @@ -45,13 +45,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -74,7 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_1.conda#394218a92951499aed2ab1bafb30b570 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -101,7 +101,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 
https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.5-hab00c5b_0_cpython.conda#f0288cb82594b1cbc71111d1cd3c5422 @@ -124,7 +124,7 @@ https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.cond https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py311hb755f60_0.conda#81d4eacf7eb2d40beee33aa71e8f94ad +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py311hb755f60_2.conda#35878142ccd98f076942b40ffb3cfbab https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 @@ -132,41 +132,41 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.1-pyh1a96a4e_0.conda#d69753ff6ee3c84a6638921dd95db662 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_0.conda#f53903649188b99e6b44c560c69f5b23 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311h9547e67_1.conda#079d914d7d7341663e30823c10083c0f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 
https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py311h459d7ec_0.conda#87b306459b81b7a7aaad37222d537a4f +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py311h459d7ec_1.conda#7e2181758f84a9c7e776af10fbb2f1a0 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_0.conda#30eaaf31141e785a445bf1ede6235fe3 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_1.conda#52719a74ad130de8fb5d047dc91f247a https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e @@ -177,7 +177,7 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f55 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_0.conda#7d9a31416c18704f55946ff7cf8da5dc +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 @@ -185,13 +185,13 @@ https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 
https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py311h459d7ec_0.conda#d23df37f3a595e8ffca99642ab6df3eb +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311hb3a22ac_5.conda#75d52ef1d318d18e554aadd13ce91b9d +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py311h459d7ec_1.conda#bb0e424cb11a7e86700d0bf69e24faec https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py311h459d7ec_0.conda#fc327c0ea015db3b6484eabb37d44e60 https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 @@ -200,29 +200,29 @@ https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c7 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h74d50f4_7.conda#3453ac94a99ad9daf17e8a313d274567 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 
https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py311h0b84326_0.conda#4b24acdc1fbbae9da03147e7d2cf8c8a +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py311h8aef010_1.conda#4d66ee2081a7cd444ff6f30d95873eef https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_0.conda#17d25ab64a32872b349579fdb07bbdb2 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_1.conda#e09eb6aad3607fb6f2c071a2c6a26e1d https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_1.conda#483fe58e14ba244110cd1be2b771b70f +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.5-pyhd8ed1ab_0.conda#3bda70bbeb2920f44db5375af2e5fe38 
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_0.conda#882a66517c52cae2719ac25308f61316 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py311h64a7726_0.conda#bf16a9f625126e378302f08e7ed67517 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_4.conda#3cff4c98f775ff6439b95bb7917702e9 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_1.conda#2e48207ebbd3e25ad14881a27e51fa1e +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_5.conda#e4d262cc3600e70b505a6761d29f6207 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b @@ -231,11 +231,11 @@ https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.ta https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py311h9547e67_0.conda#db5b3b0093d0d4565e5c89578108402e 
https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-h8e1006c_1.conda#98206c865fccdea9723f0c6f9241a24f +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_0.conda#cd758f0e1d30ada1c320be50767dd55e https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py311h320fe9a_0.conda#7f35501e126df510b250ad893482ef45 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py311h320fe9a_0.conda#1692362ba82f0556099f0143f7842de3 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py311h64a7726_1.conda#58af16843fc4469770bdbaf45d3a19de @@ -248,7 +248,7 @@ https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.cond https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_0.conda#b67672c2f39ef2912a1814e29e42c7ca 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_1.conda#20d79e2fe53b49b399f3d36977b05abb https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_102.conda#b48083ba918347f30efa94f7dc694919 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 @@ -257,9 +257,9 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_4.conda#afe5363b88d2e97266063558a6599bd0 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py311h38be061_0.conda#8148b139a0560666d661cf1d179a0cca -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.0-pyhd8ed1ab_0.conda#16cff214435f2a8163fbe67db9eafb96 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_5.conda#ec7e45bc76d9d0b69a74a2075932b8e8 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py311h38be061_1.conda#6a2cd22264c8a61c8a571bb6e524775f +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 @@ -269,4 +269,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 - diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index fc574db4f3..fac192e99b 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 8b81c2e9972c5059e1b9013a49eddbd4697c92807d6f5d5282350b6c6d0dc518 +# input_hash: cc8b627bc99f75128e66e8d5f19fad191f76de7f27898db96e0eef7d6dc6e83a @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,14 +9,14 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_1.conda#acfb4817400db5804030a3a7ef7909a1 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e 
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_cp39.conda#bfe4b3259a8ac6cdf0037752904da6a7 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_1.conda#8bb001683321dcbde117a7337b5aace7 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_1.conda#ff8999574b465089ba0aa25a5e865bd0 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 @@ -32,11 +32,11 @@ https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c 
-https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_1.conda#a0d27fd5c6f05aa45e9602b1db49581c +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d @@ -45,13 +45,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad 
https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -74,7 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_1.conda#394218a92951499aed2ab1bafb30b570 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e 
https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 @@ -101,7 +101,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 @@ -124,7 +124,7 @@ https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.cond https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py39h3d6467e_0.conda#f90bb794d0f7463fbe28596796aa0100 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py39h3d6467e_2.conda#3e27ec02c612b1580e94fe8468c28040 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f @@ -132,41 +132,41 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.1-pyh1a96a4e_0.conda#d69753ff6ee3c84a6638921dd95db662 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_0.conda#3822b0ae733e022c10469c0e46bdddc4 
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_0.conda#9c858d105816f454c6b64f3e19184b60 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h7633fee_1.conda#45d24492a2acf7d8ad8c4de6491a4fe2 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 
-https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda#c2e412b0f11e5983bcfc35d9beb91ecb https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py39hd1e30aa_0.conda#41841cc1d7387bb7a30cdde4d88afbf4 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py39hd1e30aa_1.conda#6295f6164ced8634e3d95cd5d08ad95d https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_0.conda#ccecb3196b3678e9b5fc8441d681c203 
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_1.conda#37218233bcdc310e4fde6453bc1b40d8 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e @@ -177,54 +177,54 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f55 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_0.conda#ee7f18d58a96b04fdbd2e55f7694ae0d +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hd1e30aa_1.conda#7c7ab84210a972d9efe49655802e904c https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39h7a31438_5.conda#a2271dc58a5291d18b42fa4d45d42218 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_0.conda#434246edfc30e20c0847d4c2caff0a53 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py39hd1e30aa_0.conda#de06dc7edaddbd3b60c050f3a95d6fe6 https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.0.1-pyhd8ed1ab_0.conda#d978c61aa5fc2c69380d53ad56b5ae86 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e 
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h74d50f4_7.conda#3453ac94a99ad9daf17e8a313d274567 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py39haaeba84_0.conda#f97a95fab7c69678ebf6b57396b1323e +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py39h444a776_1.conda#52ad49ce520bec37ff0423b16c8bb052 https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 
-https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_0.conda#4eaef850715aff114e2126a2f1a7b1f0 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_1.conda#39d2473881976eeb57c09c106d2d9fc3 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.5-h98fc4e7_1.conda#483fe58e14ba244110cd1be2b771b70f +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.5-pyhd8ed1ab_0.conda#3bda70bbeb2920f44db5375af2e5fe38 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_0.conda#882a66517c52cae2719ac25308f61316 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.0.1-pyhd8ed1ab_0.conda#54661981fd331e20847d8a49543dd9af +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h6183b62_0.conda#a50279322335a176d74ed167f9ce468b +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h474f0d3_0.conda#62f1d2e05327bf62728afa448f2a9261 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb 
-https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_4.conda#b83a218fa97e9963c858d0db651a7506 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_1.conda#b5188eeb7df815911a7c8db0fc16b89b +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_5.conda#93aff412f3e49fdb43361c0215cbd72d https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 @@ -232,11 +232,11 @@ https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py39h7633fee_0.conda#b673f03c191683996e66c881f90aff2b https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.5-h8e1006c_1.conda#98206c865fccdea9723f0c6f9241a24f +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_0.conda#cd758f0e1d30ada1c320be50767dd55e https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.0-py39hddac248_0.conda#0a3624f600f51df010a274176e356ac5 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py39hddac248_0.conda#e335750b7e47bb4d12db170e1a6e2608 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h474f0d3_1.conda#f62409d868e23c1f97ae2b0db5658385 @@ -249,7 +249,7 @@ https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.cond https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_0.conda#a529a20267af9f085c7f991cae79fef2 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_1.conda#3337ebf55443f31a1148f3ca6f1d9673 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_102.conda#05390bd5ad0ddc2f719392d087673344 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e @@ -258,9 +258,9 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.co https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_4.conda#e12391692d70732bf1df08b7ecf40095 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py39hf3d152e_0.conda#e348333b50ff1f978f3d6af24512de0b -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.0-pyhd8ed1ab_0.conda#16cff214435f2a8163fbe67db9eafb96 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_5.conda#e1f148e57d071b09187719df86f513c1 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py39hf3d152e_1.conda#3b6499658e281eb7204161f336926071 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 @@ -270,4 +270,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 - From c3412f73547def28fb50a6105b23e669fc475e36 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 2 Oct 2023 13:52:54 +0100 Subject: [PATCH 057/134] Ensure removal of release candidate from What's New title. 
(#5526) --- docs/src/whatsnew/3.7.rst | 10 +++++----- tools/release_do_nothing.py | 5 +++++ 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index d5ea21f3d7..f1c7fb5f2c 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -1,7 +1,7 @@ .. include:: ../common_links.inc -v3.7 (16 Aug 2023) [release candidate] -************************************** +v3.7 (16 Aug 2023) +****************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) @@ -46,7 +46,7 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) - + .. _concat_warnings: #. `@acchamber`_ added error and warning messages about coordinate overlaps to @@ -69,11 +69,11 @@ This document explains the changes made to Iris for this release ============= #. `@acchamber`_ fixed a bug with :func:`~iris.util.unify_time_units` so it does not block - concatenation through different data types in rare instances. (:pull:`5372`) + concatenation through different data types in rare instances. (:pull:`5372`) #. `@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) - + .. _cftime_warnings: #. 
`@rcomer`_ modified pp-loading to avoid a ``cftime`` warning for non-standard diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index 5d7dd2abf2..afe12a662d 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -279,6 +279,11 @@ def finalise_whats_new( whatsnew_title += " [release candidate]" # TODO: automate message = f"In {rsts.release.name}: set the page title to:\n{whatsnew_title}\n" + if not is_release_candidate: + message += ( + "\nBe sure to remove any existing mentions of release " + "candidate from the title.\n" + ) _wait_for_done(message) message = ( From 47ec7f3ecf043534f8b9183c5a552d259960d60b Mon Sep 17 00:00:00 2001 From: Henry Wright <84939917+HGWright@users.noreply.github.com> Date: Mon, 2 Oct 2023 16:11:49 +0100 Subject: [PATCH 058/134] updating docs and stale comment (#5522) --- .github/workflows/stale.yml | 2 +- .../developers_guide/contributing_documentation_easy.rst | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 67b0515e8b..f363410347 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -46,7 +46,7 @@ jobs: If you still care about this issue, then please either: * Re-open this issue, if you have sufficient permissions, or - * Add a comment pinging `@SciTools/iris-devs` who will re-open on your behalf. + * Add a comment stating that this is still relevant and someone will re-open it on your behalf. # Comment on the staled prs while closed. close-pr-message: | diff --git a/docs/src/developers_guide/contributing_documentation_easy.rst b/docs/src/developers_guide/contributing_documentation_easy.rst index f54de628bf..51554f9e19 100755 --- a/docs/src/developers_guide/contributing_documentation_easy.rst +++ b/docs/src/developers_guide/contributing_documentation_easy.rst @@ -81,9 +81,9 @@ Describing what you've changed and why will help the person who reviews your cha .. 
tip:: If you're not sure that you're making your pull request right, or have a - question, then make it anyway! You can then comment on it tagging - ``@SciTools/iris-devs`` to ask your question (then edit your pull request if - you need to). + question, then make it anyway! You can then comment on it to ask your + question, then someone from the dev team will be happy to help you out (then + edit your pull request if you need to). What Happens Next? ^^^^^^^^^^^^^^^^^^ From 58220d1eef65363522226a4545942aeee317ce2b Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Tue, 3 Oct 2023 11:54:17 +0100 Subject: [PATCH 059/134] Set some memory benchmarks to on-demand to reduce noise. (#5481) --- .../benchmarks/experimental/ugrid/regions_combine.py | 8 +++++++- benchmarks/benchmarks/save.py | 4 +++- docs/src/whatsnew/latest.rst | 5 +++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 16044c663a..5ecc90930b 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -23,7 +23,7 @@ from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD from iris.experimental.ugrid.utils import recombine_submeshes -from ... import TrackAddedMemoryAllocation +from ... import TrackAddedMemoryAllocation, on_demand_benchmark from ...generate_data.ugrid import make_cube_like_2d_cubesphere @@ -200,6 +200,8 @@ class CombineRegionsComputeRealData(MixinCombineRegions): def time_compute_data(self, n_cubesphere): _ = self.recombined_cube.data + # Vulnerable to noise, so disabled by default. 
+ @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_compute_data(self, n_cubesphere): _ = self.recombined_cube.data @@ -217,6 +219,8 @@ def time_save(self, n_cubesphere): # Save to disk, which must compute data + stream it to file. save(self.recombined_cube, "tmp.nc") + # Vulnerable to noise, so disabled by default. + @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_save(self, n_cubesphere): save(self.recombined_cube, "tmp.nc") @@ -245,6 +249,8 @@ def time_stream_file2file(self, n_cubesphere): # Save to disk, which must compute data + stream it to file. save(self.recombined_cube, "tmp.nc") + # Vulnerable to noise, so disabled by default. + @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_stream_file2file(self, n_cubesphere): save(self.recombined_cube, "tmp.nc") diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py index d00c66a0ca..e9a7918dcc 100644 --- a/benchmarks/benchmarks/save.py +++ b/benchmarks/benchmarks/save.py @@ -16,7 +16,7 @@ from iris import save from iris.experimental.ugrid import save_mesh -from . import TrackAddedMemoryAllocation +from . import TrackAddedMemoryAllocation, on_demand_benchmark from .generate_data.ugrid import make_cube_like_2d_cubesphere @@ -47,6 +47,8 @@ def time_netcdf_save_mesh(self, n_cubesphere, is_unstructured): if is_unstructured: self._save_mesh(self.cube) + # Vulnerable to noise, so disabled by default. + @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_netcdf_save(self, n_cubesphere, is_unstructured): # Don't need to copy the cube here since track_ benchmarks don't diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index b4396ad0d5..333cce6db9 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -85,6 +85,9 @@ This document explains the changes made to Iris for this release working properly. 
(Main pull request: :pull:`5437`, more detail: :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) +#. `@trexfeathers`_ set a number of memory benchmarks to be on-demand, as they + were vulnerable to false positives in CI runs. (:pull:`5481`) + #. `@acchamber`_ and `@ESadek-MO`_ resolved several deprecation to reduce number of warnings raised during tests. (:pull:`5493`, :pull:`5511`) @@ -96,8 +99,6 @@ This document explains the changes made to Iris for this release longer using the ``--strict`` argument. (:pull:`5496`) - - .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: From ee6b225e14ad7e9cf2ad177307ae7a0baef65b61 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Tue, 10 Oct 2023 12:31:06 +0100 Subject: [PATCH 060/134] Updated environment lockfiles (#5524) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 93 +++++++++++++------------- requirements/locks/py311-linux-64.lock | 93 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 91 ++++++++++++------------- 3 files changed, 140 insertions(+), 137 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index ac9c50ade5..2c863feb77 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -30,8 +30,8 @@ https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f 
-https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -40,7 +40,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.c https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 @@ -48,11 +48,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -68,8 +68,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 
-https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d @@ -80,6 +80,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.con https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf @@ -87,25 +88,25 @@ https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#a https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d 
https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_1.conda#85552d64cb49f12781668779efc738ec +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 
-https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 @@ -115,16 +116,16 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae 
-https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_0.conda#fb6201eb1daa3a3a2f91a4833bdf27c7 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda#1f95722c94f00b69af69a066c7433714 https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1ab_0.conda#fef8ef5f0a54546b9efee39468229917 https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py310hc6cd4ac_2.conda#51af196e0a8d2f253b695f9a63d53f4b +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.0-pyhd8ed1ab_0.conda#3ed9a3229f177c298b3405de13acfcd8 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py310hc6cd4ac_0.conda#90bccd216944c486966c3846b339b42f https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 @@ -146,15 
+147,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openbla https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda#4a180ab68881a86be49858c9baf4581d https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hd41b1e2_1.conda#39cac2febd98523afe9ed8e33c22f67d +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py310hd41b1e2_0.conda#03255e1437f31f25ad95bb45c8b398bb https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d 
https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda#cb25177acf28cc35cfa6c1ac1c679e22 @@ -164,7 +165,7 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py310h2372a71_1.conda#708d39474e54ba0f1832afb551633338 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py310h2372a71_0.conda#b631b889b0b4bc2fca7b8b977ca484b2 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_1.conda#bb010e368de4940771368bc3dc4c63e7 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 @@ -179,7 +180,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h2372a71_1.conda#cd944e0f7a4d1529c6707bd65423bd64 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 
https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 @@ -191,10 +192,10 @@ https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h2fee648_5.conda#fef75d6c60d8a1cc7e6d1707f470a4e2 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py310h2372a71_1.conda#d0d56bc5723b41cc1af97e2b78c54b9b -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py310h2372a71_0.conda#1f18231ffab82f236ce074b2aaa07e54 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py310h2372a71_0.conda#c7d552c32b87beb736c9658441bf93a9 https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c 
https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 @@ -205,7 +206,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3a https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h29da1c1_1.conda#8e93b1c69cddf89fd412178d3d418bae https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 @@ -214,45 +215,45 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6d https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_1.conda#c7936ec7db24bb913671a1bc5eb2b79d https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.5-pyhd8ed1ab_0.conda#3bda70bbeb2920f44db5375af2e5fe38 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_0.conda#882a66517c52cae2719ac25308f61316 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.6-pyhd8ed1ab_0.conda#d5f8944ff9ab24a292511c83dce33dea 
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310hb13e2d6_0.conda#ac3b67e928cc71548efad9b522d42fef https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_1.conda#453939b9853937e80c94e9eb3da75981 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_2.conda#bfb5c8fe5b2cce3ca6140cbd61ecef3b https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_5.conda#ef5333594a958b25912002886b82b253 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b 
-https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py310hd41b1e2_0.conda#e00d52a8657a79b0a7c8c10559784759 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_0.conda#cd758f0e1d30ada1c320be50767dd55e -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310h1f7b6fc_2.conda#7925aaa4330045bc32d334b20f446902 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py310hd41b1e2_1.conda#6a38f65d330b74495ad6990280486049 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.3-pyhd8ed1ab_0.conda#a7155483171dbc27a7385d1c26e779de +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.30-pyhd8ed1ab_0.conda#b7a2e3bb89bda8c69839485c20aabadf https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py310hcc13569_0.conda#ae976997aad218dc467d206ded2c8d8e +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py310hcc13569_1.conda#a64a2b4907b96d4bf3c9dab59563ab50 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 
-https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310hb13e2d6_1.conda#4f522fc9cb8ecfa25e39f5c2ea65b16b -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h7dcad9a_2.conda#a46061c83ed37bfa05d1ee96ec2fbb08 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py310hb13e2d6_1.conda#4260b359d8fbeab4f789a8b0f968079f +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h7dcad9a_3.conda#c81a793e9680e0a07b6ab77c29cba6d7 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.2-pyhd8ed1ab_0.conda#ddb4fd6105b4005b312625cef210ba67 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_1.conda#d6fe03e8d9e80b6e20b7ae60bf4f88b0 
-https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_102.conda#6025039727a049ab4c0f2aab842c01cb -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_103.conda#0850d2a119d51601b20c406a4909af4d +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_2.conda#09cd3006f61e7a7054405f81362e0a5f +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h1f7b6fc_1.conda#857b828a13cdddf568958f7575b25b22 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 2d41a7dd3f..34781eb756 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -30,8 +30,8 @@ https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c 
+https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -40,7 +40,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.c https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 @@ -48,11 +48,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad 
https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -68,8 +68,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed 
+https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d @@ -80,6 +80,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.con https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf @@ -87,25 +88,25 @@ https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#a https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 
-https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_1.conda#85552d64cb49f12781668779efc738ec +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e 
https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.5-hab00c5b_0_cpython.conda#f0288cb82594b1cbc71111d1cd3c5422 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 @@ -115,16 +116,16 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_0.conda#b8128d083dbf6abd472b1a3e98b0b83d 
+https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_1.conda#cce9e7c3f1c307f2a5fb08a2922d6164 https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1ab_0.conda#fef8ef5f0a54546b9efee39468229917 https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py311hb755f60_2.conda#35878142ccd98f076942b40ffb3cfbab +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.0-pyhd8ed1ab_0.conda#3ed9a3229f177c298b3405de13acfcd8 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py311hb755f60_0.conda#c54d71e8031a10d08f2e87ff81821588 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 @@ -146,15 +147,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openbla 
https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda#4a180ab68881a86be49858c9baf4581d https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311h9547e67_1.conda#079d914d7d7341663e30823c10083c0f +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py311h9547e67_0.conda#e826b71bf3dc8c91ee097663e2bcface https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 
https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 @@ -164,7 +165,7 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py311h459d7ec_1.conda#7e2181758f84a9c7e776af10fbb2f1a0 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py311h459d7ec_0.conda#60b5332b3989fda37884b92c7afd6a91 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_1.conda#52719a74ad130de8fb5d047dc91f247a https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 @@ -190,10 +191,10 @@ https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311hb3a22ac_5.conda#75d52ef1d318d18e554aadd13ce91b9d -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.1-py311h459d7ec_1.conda#bb0e424cb11a7e86700d0bf69e24faec -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d 
-https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py311h459d7ec_0.conda#fc327c0ea015db3b6484eabb37d44e60 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py311h459d7ec_0.conda#ac995b680de3bdce2531c553b27dfe7e https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 @@ -204,7 +205,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3a https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py311h8aef010_1.conda#4d66ee2081a7cd444ff6f30d95873eef https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 @@ -213,45 +214,45 @@ 
https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6d https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_1.conda#e09eb6aad3607fb6f2c071a2c6a26e1d https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.5-pyhd8ed1ab_0.conda#3bda70bbeb2920f44db5375af2e5fe38 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_0.conda#882a66517c52cae2719ac25308f61316 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.6-pyhd8ed1ab_0.conda#d5f8944ff9ab24a292511c83dce33dea +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py311h64a7726_0.conda#bf16a9f625126e378302f08e7ed67517 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_1.conda#2e48207ebbd3e25ad14881a27e51fa1e +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_2.conda#8298afb85a731b02dac82e02b6e13ae0 
https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_5.conda#e4d262cc3600e70b505a6761d29f6207 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py311h9547e67_0.conda#db5b3b0093d0d4565e5c89578108402e -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_0.conda#cd758f0e1d30ada1c320be50767dd55e -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h1f0f07a_2.conda#571c0c47e8dbcf03577935ac818b6696 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py311h9547e67_1.conda#52d3de443952d33c5cee6b24b172ce96 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.3-pyhd8ed1ab_0.conda#a7155483171dbc27a7385d1c26e779de 
+https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.30-pyhd8ed1ab_0.conda#b7a2e3bb89bda8c69839485c20aabadf https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py311h320fe9a_0.conda#1692362ba82f0556099f0143f7842de3 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py311h320fe9a_1.conda#a4371a95a8ae703a22949af28467b93d https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py311h64a7726_1.conda#58af16843fc4469770bdbaf45d3a19de -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311he06c224_2.conda#10a1953d2f74d292b5de093ceea104b2 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py311h64a7726_1.conda#e4b4d3b764e2d029477d0db88248a8b5 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311he06c224_3.conda#0494ca2b1c365390d014b1295d79e9a3 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b 
-https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.2-pyhd8ed1ab_0.conda#ddb4fd6105b4005b312625cef210ba67 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_1.conda#20d79e2fe53b49b399f3d36977b05abb -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_102.conda#b48083ba918347f30efa94f7dc694919 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_103.conda#97b45ba4ff4e46a07dd6c60040256538 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_2.conda#09cd3006f61e7a7054405f81362e0a5f +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 
fac192e99b..068b62c558 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -30,8 +30,8 @@ https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_0.conda#e805cbec4c29feb22e019245f7e47b6c +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd @@ -40,7 +40,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.c https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 
+https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 @@ -48,11 +48,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 
https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -68,8 +68,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_0.conda#43017394a280a42b48d11d2a6e169901 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_0.conda#8e3e1cb77c4b355a3776bdfb74095bed +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d @@ -80,6 +80,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.con https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 
https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf @@ -87,25 +88,25 @@ https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#a https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_1.conda#85552d64cb49f12781668779efc738ec +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_0.conda#aeafb07a327e3f14a796bf081ea07472 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb 
https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hbc2eb40_0.conda#38f84d395629e48b7c7b48a8ca740341 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda#22c89a3d87828fe925b310b9cdf0f574 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 @@ -115,16 +116,16 @@ 
https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_0.conda#3db48055eab680e43a122e2c7494e7ae -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_0.conda#8a1b6b1f5e230aaf6408d6b0aef3492f +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_1.conda#c48418c8b35f1d59ae9ae1174812b40a https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1ab_0.conda#fef8ef5f0a54546b9efee39468229917 https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.2-py39h3d6467e_2.conda#3e27ec02c612b1580e94fe8468c28040 
+https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.0-pyhd8ed1ab_0.conda#3ed9a3229f177c298b3405de13acfcd8 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py39h3d6467e_0.conda#13febcb5470ba004eeb3e7883fa66e79 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f @@ -146,15 +147,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openbla https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda#b9ce311e7aba8b5fc3122254f0a6e97e +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda#4a180ab68881a86be49858c9baf4581d https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h7633fee_1.conda#45d24492a2acf7d8ad8c4de6491a4fe2 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py39h7633fee_0.conda#e39816a8abd539079a9d0b3c9045b2cb 
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda#c2e412b0f11e5983bcfc35d9beb91ecb @@ -164,7 +165,7 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.3.0-py39hd1e30aa_1.conda#6295f6164ced8634e3d95cd5d08ad95d +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py39hd1e30aa_0.conda#756cb152772a225587a05ca0ec68fc08 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_1.conda#37218233bcdc310e4fde6453bc1b40d8 https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 @@ -179,7 +180,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5 
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hd1e30aa_1.conda#7c7ab84210a972d9efe49655802e904c +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 @@ -191,9 +192,9 @@ https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39h7a31438_5.conda#a2271dc58a5291d18b42fa4d45d42218 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_0.conda#434246edfc30e20c0847d4c2caff0a53 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.1-py39hd1e30aa_0.conda#de06dc7edaddbd3b60c050f3a95d6fe6 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py39hd1e30aa_0.conda#74b032179f7782051800908cb2250132 https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 @@ -205,7 +206,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3a https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_1.conda#6ceb4e000cbe0b56b290180aea8520e8 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py39h444a776_1.conda#52ad49ce520bec37ff0423b16c8bb052 https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 @@ -214,45 +215,45 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6d https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_1.conda#39d2473881976eeb57c09c106d2d9fc3 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 
-https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.5-pyhd8ed1ab_0.conda#3bda70bbeb2920f44db5375af2e5fe38 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_0.conda#882a66517c52cae2719ac25308f61316 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.6-pyhd8ed1ab_0.conda#d5f8944ff9ab24a292511c83dce33dea +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h474f0d3_0.conda#62f1d2e05327bf62728afa448f2a9261 https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_1.conda#b5188eeb7df815911a7c8db0fc16b89b +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_2.conda#cb5ecd8db6d8ca8b9f281658a8512433 https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_5.conda#93aff412f3e49fdb43361c0215cbd72d https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b 
-https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py39h7633fee_0.conda#b673f03c191683996e66c881f90aff2b -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.2-pyhd8ed1ab_0.conda#cce7eeb7eda0124af186a5e9ce9b0fca -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_0.conda#cd758f0e1d30ada1c320be50767dd55e -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.29-pyhd8ed1ab_0.conda#5bdbb1cb692649720b60f261b41760cd +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h44dd56e_2.conda#bb788b462770a49433d7412e7881d917 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py39h7633fee_1.conda#33afb3357cd0d120ecb26778d37579e4 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.3-pyhd8ed1ab_0.conda#a7155483171dbc27a7385d1c26e779de +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.30-pyhd8ed1ab_0.conda#b7a2e3bb89bda8c69839485c20aabadf https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 
-https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py39hddac248_0.conda#e335750b7e47bb4d12db170e1a6e2608 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py39hddac248_1.conda#f32809db710b8aac48fbc14c13058530 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h474f0d3_1.conda#f62409d868e23c1f97ae2b0db5658385 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_2.conda#d79ed0ee1738151284ebd97092a6a210 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py39h474f0d3_1.conda#55441724fedb3042d38ffa5220f00804 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_3.conda#bd2f1acb1bb15e30191370eaae54082e https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.2-pyhd8ed1ab_0.conda#ddb4fd6105b4005b312625cef210ba67 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a 
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_1.conda#3337ebf55443f31a1148f3ca6f1d9673 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_102.conda#05390bd5ad0ddc2f719392d087673344 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_1.conda#3fb5ba328a77c9fd71197a46e7f2469a -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_103.conda#c61de71bd3099973376aa370e3a0b39e +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_2.conda#09cd3006f61e7a7054405f81362e0a5f +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h44dd56e_1.conda#90c5165691fdcb5a9f43907e32ea48b4 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 From 0b569cdfad5e67f4aec3562bb26460020c7c7354 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Tue, 10 Oct 2023 16:44:52 +0100 Subject: [PATCH 061/134] Gallery: show colour bar stealing space from multiple axes (#5537) * Gallery: show colour bar stealing from multiple axes * use Iris' automatic axes replacement * update test data version * Link gallery page from whatsnew Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --------- Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- .github/workflows/benchmarks_run.yml | 2 +- .github/workflows/ci-tests.yml | 2 +- 
.../gallery_code/meteorology/plot_COP_maps.py | 22 +++++-------------- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/tests/results/imagerepo.json | 2 +- 5 files changed, 13 insertions(+), 19 deletions(-) diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index a39c531a77..02b913c6f0 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -29,7 +29,7 @@ jobs: env: IRIS_TEST_DATA_LOC_PATH: benchmarks IRIS_TEST_DATA_PATH: benchmarks/iris-test-data - IRIS_TEST_DATA_VERSION: "2.19" + IRIS_TEST_DATA_VERSION: "2.21" # Lets us manually bump the cache to rebuild ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 5c48966ce8..8d84d4e137 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -50,7 +50,7 @@ jobs: session: "tests" env: - IRIS_TEST_DATA_VERSION: "2.19" + IRIS_TEST_DATA_VERSION: "2.21" ENV_NAME: "ci-tests" steps: diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 5e158346a9..529018ec8c 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -171,23 +171,13 @@ def main(): ) plt.gca().coastlines() - # Now add a colourbar who's leftmost point is the same as the leftmost - # point of the left hand plot and rightmost point is the rightmost - # point of the right hand plot. - - # Get the positions of the 2nd plot and the left position of the 1st plot. - left, bottom, width, height = ax_array[1].get_position().bounds - first_plot_left = ax_array[0].get_position().bounds[0] - - # The width of the colorbar should now be simple. - width = left - first_plot_left + width - - # Add axes to the figure, to place the colour bar. - colorbar_axes = fig.add_axes([first_plot_left, 0.18, width, 0.03]) - - # Add the colour bar. + # Now add a colour bar which spans the two plots. 
Here we pass Figure.axes + # which is a list of all (two) axes currently on the figure. Note that + # these are different to the contents of ax_array, because those were + # standard Matplotlib Axes that Iris automatically replaced with Cartopy + # GeoAxes. cbar = plt.colorbar( - contour_result, colorbar_axes, orientation="horizontal" + contour_result, ax=fig.axes, aspect=60, orientation="horizontal" ) # Label the colour bar and add ticks. diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 333cce6db9..c5cd961679 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -75,6 +75,10 @@ This document explains the changes made to Iris for this release #. `@trexfeathers`_ documented the intended use of warnings filtering with Iris. See :ref:`filtering-warnings`. (:pull:`5509`) +#. `@rcomer`_ updated the + :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_maps.py` to show how + a colourbar may steal space from multiple axes. (:pull:`5537`) + 💼 Internal =========== diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 2313c25270..2a6e2c4dbc 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -1,6 +1,6 @@ { "gallery_tests.test_plot_COP_1d.0": "aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33", - "gallery_tests.test_plot_COP_maps.0": "ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94696935", + "gallery_tests.test_plot_COP_maps.0": "ea91789995668566913e43474adb6a917e8d947c4b46957ec6716a91958e6f81", "gallery_tests.test_plot_SOI_filtering.0": "fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4", "gallery_tests.test_plot_TEC.0": "e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c", "gallery_tests.test_plot_anomaly_log_colouring.0": "ec4464e384a39b13931a9b1c85696da968d5e6e63e26847bdbd399938d3c5a4c", From 447a892139f513d21506a0d1212b435f0ac0c485 Mon Sep 17 00:00:00 2001 From: Ataf 
Fazledin Ahamed Date: Tue, 17 Oct 2023 19:06:12 +0600 Subject: [PATCH 062/134] Replaced `NotImplementedError` with `NotImplemented` (#5544) * Replaced `NotImplementedError` with `NotImplemented` * Added contribution to `latest.rst` * Added github handle in the link section --- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/coords.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index c5cd961679..9564863d1b 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -102,6 +102,9 @@ This document explains the changes made to Iris for this release #. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no longer using the ``--strict`` argument. (:pull:`5496`) +#. `@fazledyn-or`_ replaced ``NotImplementedError`` with ``NotImplemented`` as + a proper method call. (:pull:`5544`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, @@ -109,6 +112,7 @@ This document explains the changes made to Iris for this release .. _@scottrobinson02: https://github.com/scottrobinson02 .. _@acchamber: https://github.com/acchamber +.. _@fazledyn-or: https://github.com/fazledyn-or .. 
comment diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 3ff9bc8e5e..d5ee2667d8 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -3119,7 +3119,7 @@ def __str__(self): def __add__(self, other): # Disable the default tuple behaviour of tuple concatenation - raise NotImplementedError() + return NotImplemented def xml_element(self, doc): """ From e9c77c05843c0c01947ca3e0927b1c7df0aad472 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 14:54:57 +0100 Subject: [PATCH 063/134] Updated environment lockfiles (#5545) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 56 +++++++++++++------------- requirements/locks/py311-linux-64.lock | 56 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 56 +++++++++++++------------- 3 files changed, 84 insertions(+), 84 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 2c863feb77..2655960622 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -20,7 +20,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.cond https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_0.conda#6642e4faa4804be3a0e7edfefbd16595 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 
https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -38,7 +38,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda# https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 @@ -67,7 +67,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f0 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 @@ -78,14 +78,14 @@ https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda#4b441a1ee22397d5a27dc1126b849edd https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda#1e8ef4090ca4f0d66404a7441e1dbf3c 
https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 @@ -101,19 +101,19 @@ https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda#b72f016c910ff9295b1377d3e17da3f2 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda#c37b95bcd6c6833dacfd5df0ae2f4303 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 +https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f @@ -124,7 +124,7 @@ https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.0-pyhd8ed1ab_0.conda#3ed9a3229f177c298b3405de13acfcd8 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 
https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py310hc6cd4ac_0.conda#90bccd216944c486966c3846b339b42f https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 @@ -134,7 +134,7 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed @@ -142,14 +142,14 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 
+https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda#4a180ab68881a86be49858c9baf4581d +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py310hd41b1e2_0.conda#03255e1437f31f25ad95bb45c8b398bb @@ -183,15 +183,15 @@ https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde +https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda#22541af7a9eb59fc6afcadb7ecdf9219 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 @@ -202,12 +202,12 @@ https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c7 
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h29da1c1_1.conda#8e93b1c69cddf89fd412178d3d418bae +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h01dd4db_2.conda#9ef290f84bf1f3932e9b42117d9364ff https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 @@ -241,26 +241,26 @@ https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py310hcc13569_1.con https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 
https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py310hb13e2d6_1.conda#4260b359d8fbeab4f789a8b0f968079f -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h7dcad9a_3.conda#c81a793e9680e0a07b6ab77c29cba6d7 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310h7dcad9a_0.conda#0d7c35fe5cc1f436e368ddd500deb979 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_3.conda#ce30848c8731fe993893a872218dd37a https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_1.conda#d6fe03e8d9e80b6e20b7ae60bf4f88b0 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_2.conda#5c0d101ef8fc542778aa80795a759d08 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_103.conda#0850d2a119d51601b20c406a4909af4d 
-https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_2.conda#09cd3006f61e7a7054405f81362e0a5f +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h1f7b6fc_1.conda#857b828a13cdddf568958f7575b25b22 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_5.conda#f4fe7a6e3d7c78c9de048ea9dda21690 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_1.conda#8cc0628c9703cf5c8404465e838cc5ae +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_2.conda#cda26b4d722d7319ce66df50332ff09b https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 34781eb756..0bbb6bfdcd 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -20,7 
+20,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.cond https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_0.conda#6642e4faa4804be3a0e7edfefbd16595 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -38,7 +38,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda# https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 
https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 @@ -67,7 +67,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f0 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 @@ -78,14 +78,14 @@ https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda#4b441a1ee22397d5a27dc1126b849edd https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe 
https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda#1e8ef4090ca4f0d66404a7441e1dbf3c https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 @@ -101,19 +101,19 @@ https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 
+https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda#b72f016c910ff9295b1377d3e17da3f2 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda#c37b95bcd6c6833dacfd5df0ae2f4303 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea +https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed 
https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f @@ -124,7 +124,7 @@ https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.0-pyhd8ed1ab_0.conda#3ed9a3229f177c298b3405de13acfcd8 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py311hb755f60_0.conda#c54d71e8031a10d08f2e87ff81821588 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 @@ -134,7 +134,7 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a 
https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed @@ -142,14 +142,14 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda#4a180ab68881a86be49858c9baf4581d +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc 
-https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py311h9547e67_0.conda#e826b71bf3dc8c91ee097663e2bcface @@ -182,15 +182,15 @@ https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.co https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde 
+https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda#22541af7a9eb59fc6afcadb7ecdf9219 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 @@ -201,12 +201,12 @@ https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c7 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc 
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py311h8aef010_1.conda#4d66ee2081a7cd444ff6f30d95873eef +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py311ha6c5da5_2.conda#d6de249502f16ac151fcef9f743937b9 https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 @@ -240,26 +240,26 @@ https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py311h320fe9a_1.con https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py311h64a7726_1.conda#e4b4d3b764e2d029477d0db88248a8b5 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311he06c224_3.conda#0494ca2b1c365390d014b1295d79e9a3 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311he06c224_0.conda#c90e2469d7512f3bba893533a82d7a02 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_3.conda#4ac4de995f18d232af077e7743568b97 https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 
https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_1.conda#20d79e2fe53b49b399f3d36977b05abb +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_2.conda#5655371cc61b8c31c369a7e709acb294 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_103.conda#97b45ba4ff4e46a07dd6c60040256538 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_2.conda#09cd3006f61e7a7054405f81362e0a5f +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_5.conda#ec7e45bc76d9d0b69a74a2075932b8e8 
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py311h38be061_1.conda#6a2cd22264c8a61c8a571bb6e524775f +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py311h38be061_2.conda#0289918d4a09bbd0b85fd23ddf1c3ac1 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 068b62c558..167fc29e4c 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -20,7 +20,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.cond https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_0.conda#6642e4faa4804be3a0e7edfefbd16595 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -38,7 +38,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda# 
https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 @@ -67,7 +67,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f0 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 @@ -78,14 +78,14 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda#903fa782a9067d5934210df6d79220f6 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda#4b441a1ee22397d5a27dc1126b849edd https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_4.conda#f6f0ac5665849afc0716213a6cff224d +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda#1e8ef4090ca4f0d66404a7441e1dbf3c https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 @@ -101,19 +101,19 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_4.conda#db7f2c877209ac620fcd1c3ce7407cf0 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda#b72f016c910ff9295b1377d3e17da3f2 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda#713f9eac95d051abe14c3774376854fe +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda#c37b95bcd6c6833dacfd5df0ae2f4303 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb +https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f @@ -124,7 +124,7 @@ https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.0-pyhd8ed1ab_0.conda#3ed9a3229f177c298b3405de13acfcd8 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py39h3d6467e_0.conda#13febcb5470ba004eeb3e7883fa66e79 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 @@ -134,7 +134,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed @@ -142,14 +142,14 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c 
https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.3.0-hca28451_0.conda#4ab41bee09a2d2e08de5f09d6f1eef62 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda#4a180ab68881a86be49858c9baf4581d +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py39h7633fee_0.conda#e39816a8abd539079a9d0b3c9045b2cb @@ -183,15 +183,15 @@ https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde +https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda#22541af7a9eb59fc6afcadb7ecdf9219 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda#3db543896d34fc6804ddfb9239dcb125 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py39hd1e30aa_0.conda#74b032179f7782051800908cb2250132 @@ -202,12 +202,12 @@ https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 
-https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py39h444a776_1.conda#52ad49ce520bec37ff0423b16c8bb052 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py39had0adad_2.conda#4d5990bb620ed36b10a528324d9b75e3 https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 @@ -241,26 +241,26 @@ https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py39hddac248_1.cond https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py39h474f0d3_1.conda#55441724fedb3042d38ffa5220f00804 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_3.conda#bd2f1acb1bb15e30191370eaae54082e 
+https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h1bc45ef_0.conda#ca067895d22f8a0d38f225a95184858e https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_3.conda#cbc2fe7741df3546448a534827238c32 https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_1.conda#3337ebf55443f31a1148f3ca6f1d9673 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_2.conda#404144d0628ebbbbd56d161c677cc71b https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_103.conda#c61de71bd3099973376aa370e3a0b39e -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.4.0-pyha770c72_2.conda#09cd3006f61e7a7054405f81362e0a5f +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h44dd56e_1.conda#90c5165691fdcb5a9f43907e32ea48b4 
-https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda#a8dd2dfcd570e3965c73be6c5e03e74f +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_5.conda#e1f148e57d071b09187719df86f513c1 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py39hf3d152e_1.conda#3b6499658e281eb7204161f336926071 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py39hf3d152e_2.conda#ffe5ae58957da676064e2ce5d039d259 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 From f459f152c65d158d5a13f65b9b1e84a01afbdfc0 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 18 Oct 2023 13:19:27 +0100 Subject: [PATCH 064/134] nep29 drop table schedule numpy>1.21 (#5525) * nep29 drop table schedule numpy>1.12 * add whatsnew entry --- docs/src/whatsnew/latest.rst | 6 +++++- requirements/py310.yml | 2 +- requirements/py311.yml | 2 +- requirements/py39.yml | 2 +- requirements/pypi-core.txt | 4 ++-- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 9564863d1b..58d94bf5fa 100644 --- 
a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -66,7 +66,8 @@ This document explains the changes made to Iris for this release 🔗 Dependencies =============== -#. N/A +#. `@bjlittle`_ enforced the minimum pin of ``numpy>1.21`` in accordance with the `NEP29 Drop Schedule`_. + (:pull:`5525`) 📚 Documentation @@ -117,3 +118,6 @@ This document explains the changes made to Iris for this release .. comment Whatsnew resources in alphabetical order: + +.. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule + diff --git a/requirements/py310.yml b/requirements/py310.yml index fd549a9cf7..b01586aac9 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -18,7 +18,7 @@ dependencies: - libnetcdf !=4.9.1 - matplotlib >=3.5 - netcdf4 - - numpy >=1.21, !=1.24.3 + - numpy >1.21, !=1.24.3 - python-xxhash - pyproj - scipy diff --git a/requirements/py311.yml b/requirements/py311.yml index a883e5d87a..286fe74a33 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -18,7 +18,7 @@ dependencies: - libnetcdf !=4.9.1 - matplotlib >=3.5 - netcdf4 - - numpy >=1.21, !=1.24.3 + - numpy >1.21, !=1.24.3 - python-xxhash - pyproj - scipy diff --git a/requirements/py39.yml b/requirements/py39.yml index 5b3c17510e..f534aef4f3 100644 --- a/requirements/py39.yml +++ b/requirements/py39.yml @@ -18,7 +18,7 @@ dependencies: - libnetcdf !=4.9.1 - matplotlib >=3.5 - netcdf4 - - numpy >=1.21, !=1.24.3 + - numpy >1.21, !=1.24.3 - python-xxhash - pyproj - scipy diff --git a/requirements/pypi-core.txt b/requirements/pypi-core.txt index 7937f73b4f..e286bb97bc 100644 --- a/requirements/pypi-core.txt +++ b/requirements/pypi-core.txt @@ -5,8 +5,8 @@ dask[array]>=2022.9.0 # libnetcdf!=4.9.1 (not available on PyPI) matplotlib>=3.5 netcdf4 -numpy>=1.21,!=1.24.3 +numpy>1.21,!=1.24.3 pyproj scipy shapely!=1.8.3 -xxhash \ No newline at end of file +xxhash From 69bbe98653ca371247eaa11e803144884db783c6 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 12:19:50 +0000 Subject: [PATCH 065/134] Bump scitools/workflows from 2023.09.1 to 2023.10.0 (#5540) --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 391f944310..65716338de 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.09.1 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.10.0 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index a3f0c7f05f..d92b653f26 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.09.1 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.10.0 secrets: inherit From 97ee9acb9894ebccf3412db713432cd45bbc293c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 16:30:19 +0100 Subject: [PATCH 066/134] [pre-commit.ci] pre-commit autoupdate (#5527) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) - [github.com/codespell-project/codespell: v2.2.5 → v2.2.6](https://github.com/codespell-project/codespell/compare/v2.2.5...v2.2.6) * codespell fixes (#5546) * codespell fixes * add whatsnew entry --------- Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Bill Little --- .pre-commit-config.yaml | 4 ++-- benchmarks/benchmarks/cperf/__init__.py | 2 +- benchmarks/benchmarks/generate_data/stock.py | 2 +- benchmarks/benchmarks/load/__init__.py | 2 +- benchmarks/bm_runner.py | 2 +- docs/gallery_code/meteorology/plot_COP_1d.py | 2 +- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/_deprecation.py | 2 +- lib/iris/analysis/trajectory.py | 4 ++-- lib/iris/fileformats/netcdf/saver.py | 2 +- lib/iris/pandas.py | 2 +- lib/iris/tests/unit/analysis/test_PERCENTILE.py | 2 +- lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py | 4 ++-- lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py | 2 +- .../unit/plot/test__check_geostationary_coords_and_convert.py | 2 +- 15 files changed, 21 insertions(+), 17 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32c51d35f9..a3e042e614 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ minimum_pre_commit_version: 1.21.0 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: # Prevent giant files from being committed. - id: check-added-large-files @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/codespell-project/codespell - rev: "v2.2.5" + rev: "v2.2.6" hooks: - id: codespell types_or: [asciidoc, python, markdown, rst] diff --git a/benchmarks/benchmarks/cperf/__init__.py b/benchmarks/benchmarks/cperf/__init__.py index fb311c44dc..814d29338f 100644 --- a/benchmarks/benchmarks/cperf/__init__.py +++ b/benchmarks/benchmarks/cperf/__init__.py @@ -53,7 +53,7 @@ def setup(self, file_type, three_d, three_times): if three_d: create_kwargs["n_levels"] = 71 - # Will re-use a file if already present. + # Will reuse a file if already present. 
file_path = make_cubesphere_testfile(**create_kwargs) else: diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index eaf46bb405..954e791f43 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -39,7 +39,7 @@ def _external(func_name_, temp_file_dir, **kwargs_): ) if not REUSE_DATA or not save_path.is_file(): # The xios functions take control of save location so need to move to - # a more specific name that allows re-use. + # a more specific name that allows reuse. actual_path = run_function_elsewhere( _external, func_name_=func_name, diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index 3b2a83b1b1..3d15629f9e 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -69,7 +69,7 @@ def time_realise(self, _, __, ___, ____) -> None: class STASHConstraint: - # xyz sizes mimic LoadAndRealise to maximise file re-use. + # xyz sizes mimic LoadAndRealise to maximise file reuse. params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]] param_names = ["xyz", "file_format"] diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index b0f98c04ac..4b8f6e1f18 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -82,7 +82,7 @@ def _prep_data_gen_env() -> None: else: echo("Setting up the data generation environment ...") # Get Nox to build an environment for the `tests` session, but don't - # run the session. Will re-use a cached environment if appropriate. + # run the session. Will reuse a cached environment if appropriate. _subprocess_runner( [ "nox", diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index bebbad4224..2181b89b8c 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -54,7 +54,7 @@ def main(): ) # Generate area-weights array. 
As e1 and a1b are on the same grid we can - # do this just once and re-use. This method requires bounds on lat/lon + # do this just once and reuse. This method requires bounds on lat/lon # coords, so let's add some in sensible locations using the "guess_bounds" # method. e1.coord("latitude").guess_bounds() diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 58d94bf5fa..bcc411c773 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -106,6 +106,9 @@ This document explains the changes made to Iris for this release #. `@fazledyn-or`_ replaced ``NotImplementedError`` with ``NotImplemented`` as a proper method call. (:pull:`5544`) +#. `@bjlittle`_ corrected various comment spelling mistakes detected by + `codespell`_. (:pull:`5546`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, @@ -120,4 +123,5 @@ This document explains the changes made to Iris for this release Whatsnew resources in alphabetical order: .. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule +.. _codespell: https://github.com/codespell-project/codespell diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index 8ad762a558..027e11f2dc 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -16,7 +16,7 @@ class IrisDeprecation(UserWarning): An Iris deprecation warning. Note this subclasses UserWarning for backwards compatibility with Iris' - original deprection warnings. Should subclass DeprecationWarning at the + original deprecation warnings. Should subclass DeprecationWarning at the next major release. 
""" diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 84ce89ab6f..2495ff12fc 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -734,7 +734,7 @@ class UnstructuredNearestNeigbourRegridder: """ - # TODO: cache the necessary bits of the operation so re-use can actually + # TODO: cache the necessary bits of the operation so reuse can actually # be more efficient. def __init__(self, src_cube, target_grid_cube): """ @@ -873,7 +873,7 @@ def __init__(self, src_cube, target_grid_cube): def __call__(self, src_cube): # Check the source cube X and Y coords match the original. # Note: for now, this is sufficient to ensure a valid trajectory - # interpolation, but if in future we save + re-use the cache context + # interpolation, but if in future we save and reuse the cache context # for the 'interpolate' call, we may need more checks here. # Check the given cube against the original. diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 1ff69df1f7..cfca507226 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1009,7 +1009,7 @@ def _add_inner_related_vars( for element in sorted( coordlike_elements, key=lambda element: element.name() ): - # Re-use, or create, the associated CF-netCDF variable. + # Reuse, or create, the associated CF-netCDF variable. cf_name = self._name_coord_map.name(element) if cf_name is None: # Not already present : create it diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index cb26b638e4..0d0e65d648 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -83,7 +83,7 @@ def _add_iris_coord(cube, name, points, dim, calendar=None): Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array. """ # Most functionality has been abstracted to _get_dimensional_metadata, - # allowing re-use in as_cube() and as_cubes(). + # allowing reuse in as_cube() and as_cubes(). 
coord = _get_dimensional_metadata(name, points, calendar) if coord.__class__ == DimCoord: diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index bfd3234d26..a29516c604 100644 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -94,7 +94,7 @@ class ScipyAggregateMixin: Tests for calculations specific to the default (scipy) function. Includes tests on masked data and tests to verify that the function is called with the expected keywords. Needs to be used with AggregateMixin, as some of - these tests re-use its method. + these tests reuse its method. """ diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index f39f3706ee..64d106f97d 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -72,7 +72,7 @@ def setUpClass(cls): class TestProperties1D(TestMeshCommon): - # Tests that can re-use a single instance for greater efficiency. + # Tests that can reuse a single instance for greater efficiency. @classmethod def setUpClass(cls): super().setUpClass() @@ -737,7 +737,7 @@ def test___str__units_stdname(self): class TestOperations1D(TestMeshCommon): - # Tests that cannot re-use an existing Mesh instance, instead need a new + # Tests that cannot reuse an existing Mesh instance, instead need a new # one each time. 
def setUp(self): self.mesh = mesh.Mesh( diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index ba7306bded..b95c5f09bb 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -578,7 +578,7 @@ def _make_test_meshcoord( edge_xs = self.EDGECOORDS_BASENUM + np.arange(n_edges) face_xs = self.FACECOORDS_BASENUM + np.arange(n_faces) - # Record all these for re-use in tests + # Record all these for reuse in tests self.n_faces = n_faces self.n_nodes = n_nodes self.face_xs = face_xs diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py index 633dea85c4..a06a437396 100644 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -37,7 +37,7 @@ def setUp(self): ) def _test(self, geostationary=True): - # Re-usable test for when Geostationary is present OR absent. + # Reusable test for when Geostationary is present OR absent. if geostationary: # A Geostationary projection WILL be processed. 
projection_spec = Geostationary From 0b50a2cda31a38b7eb759776355f2853bc5fb453 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 24 Oct 2023 09:04:07 +0100 Subject: [PATCH 067/134] [pre-commit.ci] pre-commit autoupdate (#5549) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.9.1 → 23.10.0](https://github.com/psf/black/compare/23.9.1...23.10.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a3e042e614..db6237d944 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/psf/black - rev: 23.9.1 + rev: 23.10.0 hooks: - id: black pass_filenames: false From f30db0d476633d0aa12813d904411e14ccad6e5e Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Tue, 24 Oct 2023 15:44:27 +0100 Subject: [PATCH 068/134] Oblique and Rotated Mercator (#5548) * Introduce new coord system classes. * Add loading code for oblique mercator. * Fix for azimuth check. * Add saving code for oblique mercator. * Fix to rotated repr. * Scale factor wording fix. * Tests first pass. * Temp test disable. * Temp RotatedMercator test disable. * Deprecate RotatedMercator. * Revert "Temp RotatedMercator test disable." This reverts commit 27c486f3122a6056618b865c0534e7b871da9288. * First attempted fix for RM test inheritance. * Revert "Temp test disable." This reverts commit a81507c59f4a873ca32e8aa683f91e3215e09ff9. * Fix warnings doctests. * Add deprecation test for RotatedMercator. * Oblique Mercator loading tests. * Oblique Mercator loading deprecation test. * Saving test for Oblique Mercator. * Fix isinstance() check. 
* What's New entry. * Temp test disable. * More temp test disabling. * WIP testing. * WIP testing. * Revert "More temp test disabling." This reverts commit ff251b75f87e733fe349fce7777a66fa8b637ca4. * Revert "Temp test disable." This reverts commit 77eba551ded2d88839566ec67d6195fcd6df8aae. * Use RotatedMercator inheritance for isinstance() check. * Check grid_mapping_name instead of using isinstance(). * Better type hinting. * Use return over yield in a fixture. * Duck typing comment. * Better grid_mapping_name checking. * Better structure for test parameterisation. --- .../src/further_topics/filtering_warnings.rst | 12 +- docs/src/whatsnew/latest.rst | 4 + lib/iris/coord_systems.py | 196 ++++++++++++++++++ .../fileformats/_nc_load_rules/actions.py | 8 + .../fileformats/_nc_load_rules/helpers.py | 56 +++++ lib/iris/fileformats/netcdf/saver.py | 31 +++ .../coord_systems/test_ObliqueMercator.py | 165 +++++++++++++++ .../coord_systems/test_RotatedMercator.py | 39 ++++ ...uild_oblique_mercator_coordinate_system.py | 183 ++++++++++++++++ .../fileformats/netcdf/saver/test_Saver.py | 46 ++++ 10 files changed, 734 insertions(+), 6 deletions(-) create mode 100644 lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py create mode 100644 lib/iris/tests/unit/coord_systems/test_RotatedMercator.py create mode 100644 lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index 689ea69a52..2cbad525d3 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:454: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
+ iris/coord_systems.py:456: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:821: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:823: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:454: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:456: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) :: @@ -125,10 +125,10 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=454) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=456) ... my_operation() ... - iris/coord_systems.py:821: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:823: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: @@ -188,7 +188,7 @@ module during execution: ... ) ... my_operation() ... 
- iris/coord_systems.py:454: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:456: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) ---- diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index bcc411c773..49698ca501 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -34,6 +34,10 @@ This document explains the changes made to Iris for this release :class:`UserWarning`\s for richer filtering. The full index of sub-categories can be seen here: :mod:`iris.exceptions` . (:pull:`5498`) +#. `@trexfeathers`_ added the :class:`~iris.coord_systems.ObliqueMercator` + and :class:`~iris.coord_systems.RotatedMercator` coordinate systems, + complete with NetCDF loading and saving. (:pull:`5548`) + 🐛 Bugs Fixed ============= diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index e2003d1286..3d986fefce 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -10,11 +10,13 @@ from abc import ABCMeta, abstractmethod from functools import cached_property +import re import warnings import cartopy.crs as ccrs import numpy as np +from iris._deprecation import warn_deprecated import iris.exceptions @@ -1634,3 +1636,197 @@ def as_cartopy_crs(self): def as_cartopy_projection(self): return self.as_cartopy_crs() + + +class ObliqueMercator(CoordSystem): + """ + A cylindrical map projection, with XY coordinates measured in metres. + + Designed for regions not well suited to :class:`Mercator` or + :class:`TransverseMercator`, as the positioning of the cylinder is more + customisable. 
+ + See Also + -------- + :class:`RotatedMercator` + + """ + + grid_mapping_name = "oblique_mercator" + + def __init__( + self, + azimuth_of_central_line, + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting=None, + false_northing=None, + scale_factor_at_projection_origin=None, + ellipsoid=None, + ): + """ + Constructs an ObliqueMercator object. + + Parameters + ---------- + azimuth_of_central_line : float + Azimuth of centerline clockwise from north at the center point of + the centre line. + latitude_of_projection_origin : float + The true longitude of the central meridian in degrees. + longitude_of_projection_origin: float + The true latitude of the planar origin in degrees. + false_easting: float, optional + X offset from the planar origin in metres. + Defaults to 0.0 . + false_northing: float, optional + Y offset from the planar origin in metres. + Defaults to 0.0 . + scale_factor_at_projection_origin: float, optional + Scale factor at the central meridian. + Defaults to 1.0 . + ellipsoid: :class:`GeogCS`, optional + If given, defines the ellipsoid. + + Examples + -------- + >>> from iris.coord_systems import GeogCS, ObliqueMercator + >>> my_ellipsoid = GeogCS(6371229.0, None, 0.0) + >>> ObliqueMercator(90.0, -22.0, -59.0, -25000.0, -25000.0, 1., my_ellipsoid) + ObliqueMercator(azimuth_of_central_line=90.0, latitude_of_projection_origin=-22.0, longitude_of_projection_origin=-59.0, false_easting=-25000.0, false_northing=-25000.0, scale_factor_at_projection_origin=1.0, ellipsoid=GeogCS(6371229.0)) + + """ + #: Azimuth of centerline clockwise from north. + self.azimuth_of_central_line = float(azimuth_of_central_line) + + #: True latitude of planar origin in degrees. + self.latitude_of_projection_origin = float( + latitude_of_projection_origin + ) + + #: True longitude of planar origin in degrees. + self.longitude_of_projection_origin = float( + longitude_of_projection_origin + ) + + #: X offset from planar origin in metres. 
+ self.false_easting = _arg_default(false_easting, 0) + + #: Y offset from planar origin in metres. + self.false_northing = _arg_default(false_northing, 0) + + #: Scale factor at the central meridian. + self.scale_factor_at_projection_origin = _arg_default( + scale_factor_at_projection_origin, 1.0 + ) + + #: Ellipsoid definition (:class:`GeogCS` or None). + self.ellipsoid = ellipsoid + + def __repr__(self): + return ( + "{!s}(azimuth_of_central_line={!r}, " + "latitude_of_projection_origin={!r}, " + "longitude_of_projection_origin={!r}, false_easting={!r}, " + "false_northing={!r}, scale_factor_at_projection_origin={!r}, " + "ellipsoid={!r})".format( + self.__class__.__name__, + self.azimuth_of_central_line, + self.latitude_of_projection_origin, + self.longitude_of_projection_origin, + self.false_easting, + self.false_northing, + self.scale_factor_at_projection_origin, + self.ellipsoid, + ) + ) + + def as_cartopy_crs(self): + globe = self._ellipsoid_to_globe(self.ellipsoid, None) + + return ccrs.ObliqueMercator( + central_longitude=self.longitude_of_projection_origin, + central_latitude=self.latitude_of_projection_origin, + false_easting=self.false_easting, + false_northing=self.false_northing, + scale_factor=self.scale_factor_at_projection_origin, + azimuth=self.azimuth_of_central_line, + globe=globe, + ) + + def as_cartopy_projection(self): + return self.as_cartopy_crs() + + +class RotatedMercator(ObliqueMercator): + """ + :class:`ObliqueMercator` with ``azimuth_of_central_line=90``. + + As noted in CF versions 1.10 and earlier: + + The Rotated Mercator projection is an Oblique Mercator projection + with azimuth = +90. + + .. deprecated:: 3.8.0 + This coordinate system was introduced as already scheduled for removal + in a future release, since CF version 1.11 onwards now requires use of + :class:`ObliqueMercator` with ``azimuth_of_central_line=90.`` . 
+ Any :class:`RotatedMercator` instances will always be saved to NetCDF + as the ``oblique_mercator`` grid mapping. + + """ + + def __init__( + self, + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting=None, + false_northing=None, + scale_factor_at_projection_origin=None, + ellipsoid=None, + ): + """ + Constructs a RotatedMercator object. + + Parameters + ---------- + latitude_of_projection_origin : float + The true longitude of the central meridian in degrees. + longitude_of_projection_origin: float + The true latitude of the planar origin in degrees. + false_easting: float, optional + X offset from the planar origin in metres. + Defaults to 0.0 . + false_northing: float, optional + Y offset from the planar origin in metres. + Defaults to 0.0 . + scale_factor_at_projection_origin: float, optional + Scale factor at the central meridian. + Defaults to 1.0 . + ellipsoid: :class:`GeogCS`, optional + If given, defines the ellipsoid. + + """ + message = ( + "iris.coord_systems.RotatedMercator is deprecated, and will be " + "removed in a future release. Instead please use " + "iris.coord_systems.ObliqueMercator with " + "azimuth_of_central_line=90 ." + ) + warn_deprecated(message) + + super().__init__( + 90.0, + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + scale_factor_at_projection_origin, + ellipsoid, + ) + + def __repr__(self): + # Remove the azimuth argument from the parent repr. 
+ result = super().__repr__() + result = re.sub(r"azimuth_of_central_line=\d*\.?\d*, ", "", result) + return result diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index be84b65132..44ef7ac549 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -156,6 +156,14 @@ def action_default(engine): None, hh.build_geostationary_coordinate_system, ), + hh.CF_GRID_MAPPING_OBLIQUE: ( + None, + hh.build_oblique_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_ROTATED_MERCATOR: ( + None, + hh.build_oblique_mercator_coordinate_system, + ), } diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 19a9cd18ca..9c75c0e866 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -23,6 +23,7 @@ import pyproj import iris +from iris._deprecation import warn_deprecated import iris.aux_factory from iris.common.mixin import _get_valid_standard_name import iris.coord_systems @@ -124,6 +125,8 @@ CF_GRID_MAPPING_TRANSVERSE = "transverse_mercator" CF_GRID_MAPPING_VERTICAL = "vertical_perspective" CF_GRID_MAPPING_GEOSTATIONARY = "geostationary" +CF_GRID_MAPPING_OBLIQUE = "oblique_mercator" +CF_GRID_MAPPING_ROTATED_MERCATOR = "rotated_mercator" # # CF Attribute Names. 
@@ -154,6 +157,7 @@ CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" CF_ATTR_GRID_PERSPECTIVE_HEIGHT = "perspective_point_height" CF_ATTR_GRID_SWEEP_ANGLE_AXIS = "sweep_angle_axis" +CF_ATTR_GRID_AZIMUTH_CENT_LINE = "azimuth_of_central_line" CF_ATTR_POSITIVE = "positive" CF_ATTR_STD_NAME = "standard_name" CF_ATTR_LONG_NAME = "long_name" @@ -893,6 +897,58 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): return cs +################################################################################ +def build_oblique_mercator_coordinate_system(engine, cf_grid_var): + """ + Create an oblique mercator coordinate system from the CF-netCDF + grid mapping variable. + + """ + ellipsoid = _get_ellipsoid(cf_grid_var) + + azimuth_of_central_line = getattr( + cf_grid_var, CF_ATTR_GRID_AZIMUTH_CENT_LINE, None + ) + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + kwargs = dict( + azimuth_of_central_line=azimuth_of_central_line, + latitude_of_projection_origin=latitude_of_projection_origin, + longitude_of_projection_origin=longitude_of_projection_origin, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=ellipsoid, + ) + + # Handle the alternative form noted in CF: rotated mercator. 
+ grid_mapping_name = getattr(cf_grid_var, CF_ATTR_GRID_MAPPING_NAME) + candidate_systems = dict( + oblique_mercator=iris.coord_systems.ObliqueMercator, + rotated_mercator=iris.coord_systems.RotatedMercator, + ) + if grid_mapping_name == "rotated_mercator": + message = ( + "Iris will stop loading the rotated_mercator grid mapping name in " + "a future release, in accordance with CF version 1.11 . Instead " + "please use oblique_mercator with azimuth_of_central_line = 90 ." + ) + warn_deprecated(message) + del kwargs[CF_ATTR_GRID_AZIMUTH_CENT_LINE] + + cs = candidate_systems[grid_mapping_name](**kwargs) + return cs + + ################################################################################ def get_attr_units(cf_var, attributes): attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index cfca507226..011f74892d 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -102,6 +102,9 @@ # UKMO specific attributes that should not be global. _UKMO_DATA_ATTRS = ["STASH", "um_stash_source", "ukmo__process_flags"] +# TODO: whenever we advance to CF-1.11 we should then discuss a completion date +# for the deprecation of Rotated Mercator in coord_systems.py and +# _nc_load_rules/helpers.py . CF_CONVENTIONS_VERSION = "CF-1.7" _FactoryDefn = collections.namedtuple( @@ -2210,6 +2213,34 @@ def add_ellipsoid(ellipsoid): ) cf_var_grid.sweep_angle_axis = cs.sweep_angle_axis + # oblique mercator (and rotated variant) + # Use duck-typing over isinstance() - subclasses (i.e. + # RotatedMercator) upset mock tests. + elif ( + getattr(cs, "grid_mapping_name", None) + == "oblique_mercator" + ): + # RotatedMercator subclasses ObliqueMercator, and RM + # instances are implicitly saved as OM due to inherited + # properties. This is correct because CF 1.11 is removing + # all mention of RM. 
+ if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.azimuth_of_central_line = ( + cs.azimuth_of_central_line + ) + cf_var_grid.latitude_of_projection_origin = ( + cs.latitude_of_projection_origin + ) + cf_var_grid.longitude_of_projection_origin = ( + cs.longitude_of_projection_origin + ) + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin + ) + # other else: warnings.warn( diff --git a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py new file mode 100644 index 0000000000..0799fb881e --- /dev/null +++ b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py @@ -0,0 +1,165 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :class:`iris.coord_systems.ObliqueMercator` class.""" + +from typing import List, NamedTuple +from unittest.mock import Mock + +from cartopy import crs as ccrs +import pytest + +from iris.coord_systems import GeogCS, ObliqueMercator + +#### +# ALL TESTS MUST BE CONTAINED IN CLASSES, TO ENABLE INHERITANCE BY +# test_RotatedMercator.py . 
+#### + + +class GlobeWithEq(ccrs.Globe): + def __eq__(self, other): + """Need eq to enable comparison with expected arguments.""" + result = NotImplemented + if isinstance(other, ccrs.Globe): + result = other.__dict__ == self.__dict__ + return result + + +class ParamTuple(NamedTuple): + """Used for easy coupling of test parameters.""" + + id: str + class_kwargs: dict + cartopy_kwargs: dict + + +kwarg_permutations: List[ParamTuple] = [ + ParamTuple( + "default", + dict(), + dict(), + ), + ParamTuple( + "azimuth", + dict(azimuth_of_central_line=90), + dict(azimuth=90), + ), + ParamTuple( + "central_longitude", + dict(longitude_of_projection_origin=90), + dict(central_longitude=90), + ), + ParamTuple( + "central_latitude", + dict(latitude_of_projection_origin=45), + dict(central_latitude=45), + ), + ParamTuple( + "false_easting_northing", + dict(false_easting=1000000, false_northing=-2000000), + dict(false_easting=1000000, false_northing=-2000000), + ), + ParamTuple( + "scale_factor", + # Number inherited from Cartopy's test_mercator.py . 
+ dict(scale_factor_at_projection_origin=0.939692620786), + dict(scale_factor=0.939692620786), + ), + ParamTuple( + "globe", + dict(ellipsoid=GeogCS(1)), + dict( + globe=GlobeWithEq(semimajor_axis=1, semiminor_axis=1, ellipse=None) + ), + ), + ParamTuple( + "combo", + dict( + azimuth_of_central_line=90, + longitude_of_projection_origin=90, + latitude_of_projection_origin=45, + false_easting=1000000, + false_northing=-2000000, + scale_factor_at_projection_origin=0.939692620786, + ellipsoid=GeogCS(1), + ), + dict( + azimuth=90.0, + central_longitude=90.0, + central_latitude=45.0, + false_easting=1000000, + false_northing=-2000000, + scale_factor=0.939692620786, + globe=GlobeWithEq( + semimajor_axis=1, semiminor_axis=1, ellipse=None + ), + ), + ), +] +permutation_ids: List[str] = [p.id for p in kwarg_permutations] + + +class TestArgs: + GeogCS = GeogCS + class_kwargs_default = dict( + azimuth_of_central_line=0.0, + latitude_of_projection_origin=0.0, + longitude_of_projection_origin=0.0, + ) + cartopy_kwargs_default = dict( + central_longitude=0.0, + central_latitude=0.0, + false_easting=0.0, + false_northing=0.0, + scale_factor=1.0, + azimuth=0.0, + globe=None, + ) + + @pytest.fixture( + autouse=True, params=kwarg_permutations, ids=permutation_ids + ) + def make_variant_inputs(self, request) -> None: + """Parse a ParamTuple into usable test information.""" + inputs: ParamTuple = request.param + self.class_kwargs = dict( + self.class_kwargs_default, **inputs.class_kwargs + ) + self.cartopy_kwargs_expected = dict( + self.cartopy_kwargs_default, **inputs.cartopy_kwargs + ) + + def make_instance(self) -> ObliqueMercator: + return ObliqueMercator(**self.class_kwargs) + + @pytest.fixture() + def instance(self): + return self.make_instance() + + def test_instantiate(self): + _ = self.make_instance() + + def test_cartopy_crs(self, instance): + ccrs.ObliqueMercator = Mock() + instance.as_cartopy_crs() + ccrs.ObliqueMercator.assert_called_with(**self.cartopy_kwargs_expected) + 
+ def test_cartopy_projection(self, instance): + ccrs.ObliqueMercator = Mock() + instance.as_cartopy_projection() + ccrs.ObliqueMercator.assert_called_with(**self.cartopy_kwargs_expected) + + @pytest.fixture() + def label_class(self, instance): + """Make the tested coordinate system available, even for subclasses.""" + from iris import coord_systems + + instance_class = "{!s}".format(instance.__class__.__name__) + globals()[instance_class] = getattr(coord_systems, instance_class) + + def test_repr(self, instance, label_class): + """Test that the repr can be used to regenerate an identical object.""" + assert eval(repr(instance)) == instance diff --git a/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py b/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py new file mode 100644 index 0000000000..97921efec6 --- /dev/null +++ b/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py @@ -0,0 +1,39 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :class:`iris.coord_systems.RotatedMercator` class.""" + +import pytest + +from iris._deprecation import IrisDeprecation +from iris.coord_systems import RotatedMercator + +from . 
import test_ObliqueMercator + + +class TestArgs(test_ObliqueMercator.TestArgs): + class_kwargs_default = dict( + latitude_of_projection_origin=0.0, + longitude_of_projection_origin=0.0, + ) + cartopy_kwargs_default = dict( + central_longitude=0.0, + central_latitude=0.0, + false_easting=0.0, + false_northing=0.0, + scale_factor=1.0, + azimuth=90.0, + globe=None, + ) + + def make_instance(self) -> RotatedMercator: + kwargs = self.class_kwargs + kwargs.pop("azimuth_of_central_line", None) + return RotatedMercator(**kwargs) + + +def test_deprecated(): + with pytest.warns(IrisDeprecation, match="azimuth_of_central_line=90"): + _ = RotatedMercator(0, 0) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py new file mode 100644 index 0000000000..b11d8d3cca --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -0,0 +1,183 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`. 
+ +""" +from typing import List, NamedTuple, Type +from unittest import mock + +import pytest + +from iris import coord_systems +from iris._deprecation import IrisDeprecation +from iris.coord_systems import ( + CoordSystem, + GeogCS, + ObliqueMercator, + RotatedMercator, +) +from iris.fileformats._nc_load_rules.helpers import ( + build_oblique_mercator_coordinate_system, +) + + +class ParamTuple(NamedTuple): + """Used for easy coupling of test parameters.""" + + id: str + nc_attributes: dict + expected_class: Type[CoordSystem] + coord_system_kwargs: dict + + +kwarg_permutations: List[ParamTuple] = [ + ParamTuple( + "default", + dict(), + ObliqueMercator, + dict(), + ), + ParamTuple( + "azimuth", + dict(azimuth_of_central_line=90), + ObliqueMercator, + dict(azimuth_of_central_line=90), + ), + ParamTuple( + "central_longitude", + dict(longitude_of_projection_origin=90), + ObliqueMercator, + dict(longitude_of_projection_origin=90), + ), + ParamTuple( + "central_latitude", + dict(latitude_of_projection_origin=45), + ObliqueMercator, + dict(latitude_of_projection_origin=45), + ), + ParamTuple( + "false_easting_northing", + dict(false_easting=1000000, false_northing=-2000000), + ObliqueMercator, + dict(false_easting=1000000, false_northing=-2000000), + ), + ParamTuple( + "scale_factor", + # Number inherited from Cartopy's test_mercator.py . 
+ dict(scale_factor_at_projection_origin=0.939692620786), + ObliqueMercator, + dict(scale_factor_at_projection_origin=0.939692620786), + ), + ParamTuple( + "globe", + dict(semi_major_axis=1), + ObliqueMercator, + dict(ellipsoid=GeogCS(semi_major_axis=1, semi_minor_axis=1)), + ), + ParamTuple( + "combo", + dict( + azimuth_of_central_line=90, + longitude_of_projection_origin=90, + latitude_of_projection_origin=45, + false_easting=1000000, + false_northing=-2000000, + scale_factor_at_projection_origin=0.939692620786, + semi_major_axis=1, + ), + ObliqueMercator, + dict( + azimuth_of_central_line=90.0, + longitude_of_projection_origin=90.0, + latitude_of_projection_origin=45.0, + false_easting=1000000, + false_northing=-2000000, + scale_factor_at_projection_origin=0.939692620786, + ellipsoid=GeogCS(semi_major_axis=1, semi_minor_axis=1), + ), + ), + ParamTuple( + "rotated", + dict(grid_mapping_name="rotated_mercator"), + RotatedMercator, + dict(), + ), + ParamTuple( + "rotated_azimuth_ignored", + dict( + grid_mapping_name="rotated_mercator", + azimuth_of_central_line=45, + ), + RotatedMercator, + dict(), + ), +] +permutation_ids: List[str] = [p.id for p in kwarg_permutations] + + +class TestAttributes: + """Test that NetCDF attributes are correctly converted to class arguments.""" + + nc_attributes_default = dict( + grid_mapping_name="oblique_mercator", + azimuth_of_central_line=0.0, + latitude_of_projection_origin=0.0, + longitude_of_projection_origin=0.0, + scale_factor_at_projection_origin=1.0, + # Optional attributes not included. 
+ ) + coord_system_kwargs_default = dict( + azimuth_of_central_line=0.0, + latitude_of_projection_origin=0.0, + longitude_of_projection_origin=0.0, + false_easting=None, + false_northing=None, + scale_factor_at_projection_origin=1.0, + ellipsoid=None, + ) + + @pytest.fixture( + autouse=True, params=kwarg_permutations, ids=permutation_ids + ) + def make_variant_inputs(self, request) -> None: + """Parse a ParamTuple into usable test information.""" + inputs: ParamTuple = request.param + + self.nc_attributes = dict( + self.nc_attributes_default, **inputs.nc_attributes + ) + self.expected_class = inputs.expected_class + coord_system_kwargs_expected = dict( + self.coord_system_kwargs_default, **inputs.coord_system_kwargs + ) + + if self.expected_class is RotatedMercator: + del coord_system_kwargs_expected["azimuth_of_central_line"] + + self.coord_system_args_expected = list( + coord_system_kwargs_expected.values() + ) + + def test_attributes(self): + cf_var_mock = mock.Mock(spec=[], **self.nc_attributes) + coord_system_mock = mock.Mock(spec=self.expected_class) + setattr(coord_systems, self.expected_class.__name__, coord_system_mock) + + _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) + coord_system_mock.assert_called_with(*self.coord_system_args_expected) + + +def test_deprecation(): + nc_attributes = dict( + grid_mapping_name="rotated_mercator", + latitude_of_projection_origin=0.0, + longitude_of_projection_origin=0.0, + scale_factor_at_projection_origin=1.0, + ) + cf_var_mock = mock.Mock(spec=[], **nc_attributes) + with pytest.warns(IrisDeprecation, match="azimuth_of_central_line = 90"): + _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index af0d7bcd30..8253e59368 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ 
-24,7 +24,9 @@ LambertAzimuthalEqualArea, LambertConformal, Mercator, + ObliqueMercator, RotatedGeogCS, + RotatedMercator, Stereographic, TransverseMercator, VerticalPerspective, @@ -1065,6 +1067,50 @@ def test_geo_cs(self): } self._test(coord_system, expected) + def test_oblique_cs(self): + # Some none-default settings to confirm all parameters are being + # handled. + + kwargs_rotated = dict( + latitude_of_projection_origin=90.0, + longitude_of_projection_origin=45.0, + false_easting=1000000.0, + false_northing=-2000000.0, + scale_factor_at_projection_origin=0.939692620786, + ellipsoid=GeogCS(1), + ) + + # Same as rotated, but with azimuth too. + oblique_azimuth = dict(azimuth_of_central_line=45.0) + kwargs_oblique = dict(kwargs_rotated, **oblique_azimuth) + + expected_rotated = dict( + # Automatically converted to oblique_mercator in line with CF 1.11 . + grid_mapping_name=b"oblique_mercator", + # Azimuth should be automatically populated. + azimuth_of_central_line=90.0, + **kwargs_rotated, + ) + # Convert the ellipsoid + expected_rotated.update( + dict( + earth_radius=expected_rotated.pop("ellipsoid").semi_major_axis, + longitude_of_prime_meridian=0.0, + ) + ) + + # Same as rotated, but different azimuth. 
+ expected_oblique = dict(expected_rotated, **oblique_azimuth) + + oblique = ObliqueMercator(**kwargs_oblique) + rotated = RotatedMercator(**kwargs_rotated) + + for coord_system, expected in [ + (oblique, expected_oblique), + (rotated, expected_rotated), + ]: + self._test(coord_system, expected) + if __name__ == "__main__": tests.main() From a65f3912d532ceefa5545516aacd475c3ad51bc4 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Thu, 26 Oct 2023 12:11:19 +0100 Subject: [PATCH 069/134] updated layout of top navbar (#5505) --- docs/src/conf.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index 7f7322c1f8..72f2ebf4ff 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -294,7 +294,9 @@ def _dotv(version): "collapse_navigation": True, "navigation_depth": 3, "show_prev_next": True, - "navbar_align": "left", + "navbar_align": "content", + # removes the search box from the top bar + "navbar_persistent": [], # TODO: review if 6 links is too crowded. "header_links_before_dropdown": 6, "github_url": "https://github.com/SciTools/iris", From cc52dca02fdfc5c7fe1f48dc1c378daf6905197f Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Thu, 26 Oct 2023 12:13:15 +0100 Subject: [PATCH 070/134] moved latest warning banner logic to conf.py (#5508) --- docs/src/_templates/layout.html | 20 -------------------- docs/src/conf.py | 10 ++++++++++ 2 files changed, 10 insertions(+), 20 deletions(-) delete mode 100644 docs/src/_templates/layout.html diff --git a/docs/src/_templates/layout.html b/docs/src/_templates/layout.html deleted file mode 100644 index 974bd12753..0000000000 --- a/docs/src/_templates/layout.html +++ /dev/null @@ -1,20 +0,0 @@ -{% extends "pydata_sphinx_theme/layout.html" %} - -{# This uses blocks. 
See: - https://www.sphinx-doc.org/en/master/templating.html -#} - - - {%- block docs_body %} - - {% if on_rtd and rtd_version == 'latest' %} -
- You are viewing the latest unreleased documentation - v{{ version }}. You can switch to a stable version - via the flyout menu in the bottom corner of the screen. -
-

- {%- endif %} - - {{ super() }} -{%- endblock %} diff --git a/docs/src/conf.py b/docs/src/conf.py index 72f2ebf4ff..17a0d621cb 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -331,6 +331,16 @@ def _dotv(version): }, } +# if we are building via Read The Docs and it is the latest (not stable) +if on_rtd and rtd_version == "latest": + html_theme_options[ + "announcement" + ] = f""" + You are viewing the latest unreleased documentation + {version}. You can switch to a +
stable + version.""" + rev_parse = run(["git", "rev-parse", "--short", "HEAD"], capture_output=True) commit_sha = rev_parse.stdout.decode().strip() From 7e7d40aaa3e87d1a5d63da5ce44a925eb4f8e34e Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Thu, 26 Oct 2023 13:19:03 +0100 Subject: [PATCH 071/134] Added whatsnew. (#5552) --- docs/src/whatsnew/latest.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 49698ca501..a17fb1ac2d 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -84,6 +84,10 @@ This document explains the changes made to Iris for this release :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_maps.py` to show how a colourbar may steal space from multiple axes. (:pull:`5537`) +#. `@tkknight`_ improved the top navgation bar alignment and amount of + links shown. Also improved how the warning banner is implemented. + (:pull:`5505` and :pull:`5508`) + 💼 Internal =========== From c07cad7837f26084700372dcfb6ab9f47507708f Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 30 Oct 2023 11:36:44 +0000 Subject: [PATCH 072/134] updated link (#5556) --- docs/src/whatsnew/1.4.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst index 989198296c..912a1e3bad 100644 --- a/docs/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -58,7 +58,7 @@ Features * Use the latest release of Cartopy, v0.8.0. -.. _OPeNDAP: http://www.opendap.org/about +.. _OPeNDAP: http://www.opendap.org .. 
_exp-regrid: Experimental Regridding Enhancements From 8811ef7be35ca547a399f798664e5340adf6450c Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Tue, 31 Oct 2023 09:08:38 +0000 Subject: [PATCH 073/134] Exempt major release label from stalebot (#5559) --- .github/workflows/stale.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index f363410347..8e18b36491 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -64,11 +64,11 @@ jobs: # Labels on issues exempted from stale. exempt-issue-labels: - "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎" + "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎, Release: Major" # Labels on prs exempted from stale. exempt-pr-labels: - "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎" + "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎, Release: Major" # Max number of operations per run. 
operations-per-run: 300 From cb4669c9541e8fcd1169c0d91cf4920c28abd394 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 10:37:45 +0000 Subject: [PATCH 074/134] [pre-commit.ci] pre-commit autoupdate (#5558) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.10.0 → 23.10.1](https://github.com/psf/black/compare/23.10.0...23.10.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index db6237d944..5aefc1da76 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/psf/black - rev: 23.10.0 + rev: 23.10.1 hooks: - id: black pass_filenames: false From cec0e092af5d106a1e81a04d5e6fb21c46b259d2 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 31 Oct 2023 10:57:18 +0000 Subject: [PATCH 075/134] removed now incorrect statement. (#5555) --- docs/src/voted_issues.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst index 0c99638bbd..33d1982a7b 100644 --- a/docs/src/voted_issues.rst +++ b/docs/src/voted_issues.rst @@ -53,5 +53,3 @@ the below table. .. note:: The data in this table is updated every 30 minutes and is sourced from `voted-issues.json`_. For the latest data please see the `issues on GitHub`_. - Note that the list on Github does not show the number of votes 👍 - only the total number of comments for the whole issue. 
\ No newline at end of file From e0850ebc73824c929f48d51067f88c3c2067b7fb Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Fri, 3 Nov 2023 10:42:04 +0000 Subject: [PATCH 076/134] More sensible time axis and tick labels for 2D plots (#5561) * Don't used mpl date2num * removed units for time coords due to not matching autoticks * update imagerepo.json * Revert change from #616, as this is now handled within _title * whatsnew * update test data version --------- Co-authored-by: alex chamberlain-clay --- .github/workflows/benchmarks_run.yml | 2 +- .github/workflows/ci-tests.yml | 2 +- docs/src/whatsnew/latest.rst | 3 +++ lib/iris/plot.py | 9 -------- lib/iris/quickplot.py | 17 +++----------- lib/iris/tests/results/imagerepo.json | 32 +++++++++++++-------------- 6 files changed, 24 insertions(+), 41 deletions(-) diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index 02b913c6f0..31b6499ecd 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -29,7 +29,7 @@ jobs: env: IRIS_TEST_DATA_LOC_PATH: benchmarks IRIS_TEST_DATA_PATH: benchmarks/iris-test-data - IRIS_TEST_DATA_VERSION: "2.21" + IRIS_TEST_DATA_VERSION: "2.22" # Lets us manually bump the cache to rebuild ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 8d84d4e137..7fe06ced30 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -50,7 +50,7 @@ jobs: session: "tests" env: - IRIS_TEST_DATA_VERSION: "2.21" + IRIS_TEST_DATA_VERSION: "2.22" ENV_NAME: "ci-tests" steps: diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a17fb1ac2d..a45f40d5cf 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -48,6 +48,9 @@ This document explains the changes made to Iris for this release #. 
`@ESadek-MO`_ has updated :mod:`iris.tests.graphics.idiff` to stop duplicated file names preventing acceptance. (:issue:`5098`, :pull:`5482`) +#. `@acchamber`_ and `@rcomer`_ modified 2D plots so that time axes and their + ticks have more sensible default labels. (:issue:`5426`, :pull:`5561`) + 💣 Incompatible Changes ======================= diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 28b458f715..b32b45195a 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -22,7 +22,6 @@ import matplotlib.animation as animation import matplotlib.axes import matplotlib.collections as mpl_collections -import matplotlib.dates as mpl_dates from matplotlib.offsetbox import AnchoredText import matplotlib.pyplot as plt import matplotlib.ticker as mpl_ticker @@ -454,10 +453,6 @@ def _draw_2d_from_bounds(draw_method_name, cube, *args, **kwargs): else: values = coord.contiguous_bounds() values = _fixup_dates(coord, values) - if values.dtype == np.dtype(object) and isinstance( - values[0], datetime.datetime - ): - values = mpl_dates.date2num(values) plot_arrays.append(values) @@ -557,10 +552,6 @@ def _draw_2d_from_points(draw_method_name, arg_func, cube, *args, **kwargs): ) plot_arrays.append(np.arange(values.size)) string_axes[axis_name] = values - elif values.dtype == np.dtype(object) and isinstance( - values[0], datetime.datetime - ): - plot_arrays.append(mpl_dates.date2num(values)) else: plot_arrays.append(values) diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 9209d4b3b7..12b5e80697 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -14,9 +14,7 @@ """ import cf_units -from matplotlib import __version__ as _mpl_version import matplotlib.pyplot as plt -from packaging import version import iris.config import iris.coords @@ -44,18 +42,11 @@ def _title(cube_or_coord, with_units): units.is_unknown() or units.is_no_unit() or units == cf_units.Unit("1") + or units.is_time_reference() ): if _use_symbol(units): units = units.symbol - elif 
units.is_time_reference(): - # iris.plot uses matplotlib.dates.date2num, which is fixed to the below unit. - if version.parse(_mpl_version) >= version.parse("3.3"): - days_since = "1970-01-01" - else: - days_since = "0001-01-01" - units = "days since {}".format(days_since) title += " / {}".format(units) - return title @@ -117,10 +108,8 @@ def _label_with_points(cube, result=None, ndims=2, coords=None, axes=None): def _get_titles(u_object, v_object): if u_object is None: u_object = iplt._u_object_from_v_object(v_object) - xunits = u_object is not None and not u_object.units.is_time_reference() - yunits = not v_object.units.is_time_reference() - xlabel = _title(u_object, with_units=xunits) - ylabel = _title(v_object, with_units=yunits) + xlabel = _title(u_object, with_units=True) + ylabel = _title(v_object, with_units=True) title = "" if u_object is None: title = _title(v_object, with_units=False) diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 2a6e2c4dbc..69beacb848 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -129,8 +129,8 @@ "iris.tests.test_plot.TestHybridHeight.test_points.3": "fe857b91917a847ec4bd3f01c47c6ca43b11915a3ea4db3b1b4a84c4c03f3fc1", "iris.tests.test_plot.TestHybridHeight.test_points.4": "b878387e978ec2f0c0f09f83878f3f81c070c0fe78d0c1763fa13856d03e3f0f", "iris.tests.test_plot.TestMissingCS.test_missing_cs.0": "fa816ac1857e853cc17e957ac15f3e8494c6c8f43e81c13b3f813e91c07e3f46", - "iris.tests.test_plot.TestMissingCoord.test_no_u.0": "ea856a95955a954ac17f954a807e3f48951ac07e3f81c0ff7ea16a81c0bf3f81", - "iris.tests.test_plot.TestMissingCoord.test_no_u.1": "ea956ab5954a954ac17e9542817f2f60950ac07f3e80c0ff7a856aa5c2ff3f80", + "iris.tests.test_plot.TestMissingCoord.test_no_u.0": "ea856a95955a956ac17f954a817e3f8c953ac07e3e81c07f7ea16a81c07e3e81", + "iris.tests.test_plot.TestMissingCoord.test_no_u.1": 
"ea956ab5954a954ac17e954a857f3f80954ac07f7e80c07f7a856a84c07f3f81", "iris.tests.test_plot.TestMissingCoord.test_no_v.0": "fa816a85957a857ac17e954ac17e1fa2950bc07e3e81c07f3e807a85c17f3f81", "iris.tests.test_plot.TestMissingCoord.test_no_v.1": "fa856a85957a857ac17e954ac17e9d02954ac07e3e81c07f3e857a85c2fd3f80", "iris.tests.test_plot.TestMissingCoord.test_none.0": "fa816a85957a857ac17e954ac17e3fa2950ac07e3e80c07f3e807a85c1ff3f81", @@ -168,12 +168,12 @@ "iris.tests.test_plot.TestPlotCitation.test_axes.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", "iris.tests.test_plot.TestPlotCitation.test_figure.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_non_cube_coordinate.0": "fa81857e857e3e85857e7a81857e7a81857e7a817e81780b7a81c56a7a81857e", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": "ea853f10956ac1e1957a854e957a207e955e6aa76ae17aa16a856aaf6ab19e12", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": "ea853a85857a857a957a857a957ed05a857b3e946a606b917a816f247a853af4", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": "eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": "aff24ab7fd05952dbd0f950f910fed48c47868f2e1b9329094266e345a850f6c", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": "eaa9b5699556854e9456854ed05625f9d0a92bfdc0a90afd81f97e00855e7ab6", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": "eaf73e0d9503852c950395ac9528c1fad06cc0f2d1ec6af2c0fc6a536a1797f3", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": "ea853e11956ac1e3957a844e957a607e955e6ae36ae17aa16a856be86ab13c32", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": "ea857a85857ac57a957a857a957ad05e850b3ed46e206b917a816f247a953ae4", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": 
"ebfdcac9bd209434b696856795cb012e95676b7b81186acdc06536ad89182fda", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": "aaff7ab2fd04902cfd0c950f9d010f4bd64069f3e1993a9894262e345ae56f6c", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": "eaa9d5129556c55695568556955623f9c0292bf9c0a90bfdc0fd7e0085562ff9", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": "eaf77a0d9553c52c950095ac952885ea952c87f3952c6bf3d42c6a536a57bf80", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": "afea950ddb13c03e34359ad8a4c86f24913f2693806e3ff1f4087b4285fd2af2", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": "afee9632de05c9d9f180d168c454a53e931b3e84954a3b8c85f94ce703ff7284", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.0": "ea853f00957ac07c957ac0bf951a69f3c47c7a5f3a4127816b953e646b813761", @@ -202,8 +202,8 @@ "iris.tests.test_plot.TestQuickplotPlot.test_x.0": "82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1", "iris.tests.test_plot.TestQuickplotPlot.test_y.0": "a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10", "iris.tests.test_plot.TestQuickplotPlot.test_z.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", - "iris.tests.test_plot.TestSimple.test_bounds.0": "ea856a85954a957ac17e954ac17a9d3a956ac07e3e80c07f3e857aa5c27d3f80", - "iris.tests.test_plot.TestSimple.test_points.0": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_plot.TestSimple.test_bounds.0": "ea856a85955a957ac17e954ac17a9d22956ac07e3e81c07f3e857aa5c27d3f80", + "iris.tests.test_plot.TestSimple.test_points.0": "ea856a85957a957ac17e954ac17e1fa2950ac07e3e80c07d3e847a85c1ff3f81", "iris.tests.test_plot.TestSymbols.test_cloud_cover.0": "eb5291e494ad6e136b5291ec94ad6e136b5291ec94ad6e136b5291ec94ad6e13", "iris.tests.test_quickplot.TestLabels.test_alignment.0": "be813fe0954ac07fc0ff3e81c03fc97a6d0094af3f80c17f36a53240d97f2d82", "iris.tests.test_quickplot.TestLabels.test_contour.0": 
"a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db", @@ -220,12 +220,12 @@ "iris.tests.test_quickplot.TestPlotHist.test_horizontal.0": "b59cc3dadb433c24c4f166039438793591a7dbdcbcdc9ccc68c697a91b139131", "iris.tests.test_quickplot.TestPlotHist.test_vertical.0": "bf80c7c6c07d7959647e343a33364b699589c6c64ec0312b9e227ad681ffcc68", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": "fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": "ea856a95955a956ac17f950a807e3f4e951ac07e3f81c0ff3ea16aa1c0bd3e81", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": "eaf9eec9f729943032168d66d4db896e9567497b81304aedc96514ad8d18669a", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": "a6fb4b967f00950eb00f9d0f900fcd62dc7868f2c1bb3a909c266e34daa52f6c", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": "eaa9b549f756854ea0168d6ed556896fd8a909ed88290afdd9e97e008d6e2296", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": "aad73e0df78085ac840195ac9528d9fad56cd8f2906c48f2d0ec7a536a1737f3", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": "ea856a95955a956ac17f950a817e3fcc951ac07e3e81c07f7ea16a85c07e3e81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1fa2950ac07e3e80c07d3e847a85c1ff3f81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": "eaf9e2c9ff60b43036168d6795c2892e95674b7b80304aedc8651ead99192eda", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": "a2ff7892771d912cb4089d0ffd4b8d429c4049f3d1bb1a909c266e34dae56f68", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": 
"eaa9f598b756a41e8056855e955689f9d9610be988290bfdd9fdfe0089562a61", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": "ead7780cf7d3c5acb40095acd56999e2952899f2d5ec0bf3902c6a536a57b700", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": "a6fb958dff50c03e203598dca4c9cd26933f9cf3886e1de1dc047b4289ec2672", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": "a2ffb6127f0dc9992085d960c6748d3edb121ca49d6a1b048df34ce789ff7205", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.0": "ea856a95957a957ac07e954ac17e3e86950bc17f3ea4c27d3e833ac1c1e03f80", From 9907bce254b6ee92c419d0c9ef6843fd97f717c8 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Wed, 8 Nov 2023 14:30:59 +0000 Subject: [PATCH 077/134] DOCS: Removed broken git links. (#5569) * removed broken git links. * added whatsnew --- docs/src/conf.py | 1 - .../gitwash/development_workflow.rst | 11 +++++------ docs/src/developers_guide/gitwash/git_links.inc | 14 -------------- docs/src/whatsnew/latest.rst | 2 ++ 4 files changed, 7 insertions(+), 21 deletions(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index 17a0d621cb..8be34c1989 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -390,7 +390,6 @@ def _dotv(version): "https://docs.github.com", "https://github.com", "http://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", - "http://schacon.github.com/git", "http://scitools.github.com/cartopy", "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf", "https://software.ac.uk/how-cite-software", diff --git a/docs/src/developers_guide/gitwash/development_workflow.rst b/docs/src/developers_guide/gitwash/development_workflow.rst index b38ddd6e90..8545a04308 100644 --- a/docs/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/src/developers_guide/gitwash/development_workflow.rst @@ -74,7 +74,7 @@ what the changes in the branch are for. 
For example ``add-ability-to-fly``, or git checkout my-new-feature Generally, you will want to keep your feature branches on your public github_ -fork of `iris`_. To do this, you `git push`_ this new branch up to your +fork of `iris`_. To do this, you ``git push`` this new branch up to your github repo. Generally (if you followed the instructions in these pages, and by default), git will have a link to your github repo, called ``origin``. You push up to your own repo on github with:: @@ -108,7 +108,7 @@ In More Detail -------------- #. Make some changes -#. See which files have changed with ``git status`` (see `git status`_). +#. See which files have changed with ``git status``. You'll see a listing like this one:: # On branch ny-new-feature @@ -124,16 +124,15 @@ In More Detail # INSTALL no changes added to commit (use "git add" and/or "git commit -a") -#. Check what the actual changes are with ``git diff`` (`git diff`_). -#. Add any new files to version control ``git add new_file_name`` (see - `git add`_). +#. Check what the actual changes are with ``git diff``. +#. Add any new files to version control ``git add new_file_name``. #. To commit all modified files into the local copy of your repo, do ``git commit -am 'A commit message'``. Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag will automatically stage all files that have been modified and deleted. #. To push the changes up to your forked repo on github, do a ``git - push`` (see `git push`_). + push``. Testing Your Changes diff --git a/docs/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc index 42bd556c3d..bf20d13e5f 100644 --- a/docs/src/developers_guide/gitwash/git_links.inc +++ b/docs/src/developers_guide/gitwash/git_links.inc @@ -13,20 +13,6 @@ .. _github help: https://help.github.com .. _git documentation: https://git-scm.com/docs -.. 
_git clone: http://schacon.github.com/git/git-clone.html -.. _git checkout: http://schacon.github.com/git/git-checkout.html -.. _git commit: http://schacon.github.com/git/git-commit.html -.. _git push: http://schacon.github.com/git/git-push.html -.. _git pull: http://schacon.github.com/git/git-pull.html -.. _git add: http://schacon.github.com/git/git-add.html -.. _git status: http://schacon.github.com/git/git-status.html -.. _git diff: http://schacon.github.com/git/git-diff.html -.. _git log: http://schacon.github.com/git/git-log.html -.. _git branch: http://schacon.github.com/git/git-branch.html -.. _git remote: http://schacon.github.com/git/git-remote.html -.. _git rebase: http://schacon.github.com/git/git-rebase.html -.. _git config: http://schacon.github.com/git/git-config.html - .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html .. |emdash| unicode:: U+02014 diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a45f40d5cf..5aa87f6473 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -91,6 +91,8 @@ This document explains the changes made to Iris for this release links shown. Also improved how the warning banner is implemented. (:pull:`5505` and :pull:`5508`) +#. `@tkknight`_ removed broken git links. 
(:pull:`5569`) + 💼 Internal =========== From 0dc2dd9c0c4a58aafcb11408db806e869fe3d6af Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Thu, 9 Nov 2023 12:04:40 +0000 Subject: [PATCH 078/134] fixed spacing (#5572) --- .../dask_best_practices/index.rst | 10 ++--- docs/src/further_topics/ugrid/data_model.rst | 28 ++++++------- docs/src/techpapers/um_files_loading.rst | 42 +++++++++---------- docs/src/userguide/navigating_a_cube.rst | 8 ++-- docs/src/userguide/real_and_lazy_data.rst | 23 +++++----- 5 files changed, 55 insertions(+), 56 deletions(-) diff --git a/docs/src/further_topics/dask_best_practices/index.rst b/docs/src/further_topics/dask_best_practices/index.rst index eb3321345b..f126427d3f 100644 --- a/docs/src/further_topics/dask_best_practices/index.rst +++ b/docs/src/further_topics/dask_best_practices/index.rst @@ -144,8 +144,8 @@ Iris provides a basic chunking shape to Dask, attempting to set the shape for best performance. The chunking that is used can depend on the file format that is being loaded. See below for how chunking is performed for: - * :ref:`chunking_netcdf` - * :ref:`chunking_pp_ff` +* :ref:`chunking_netcdf` +* :ref:`chunking_pp_ff` It can in some cases be beneficial to re-chunk the arrays in Iris cubes. For information on how to do this, see :ref:`dask_rechunking`. @@ -208,9 +208,9 @@ If you feel you have an example of a Dask best practice that you think may be he please share them with us by raising a new `discussion on the Iris repository `_. - * :doc:`dask_pp_to_netcdf` - * :doc:`dask_parallel_loop` - * :doc:`dask_bags_and_greed` +* :doc:`dask_pp_to_netcdf` +* :doc:`dask_parallel_loop` +* :doc:`dask_bags_and_greed` .. 
toctree:: :hidden: diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst index cc3cc7b793..208254ada6 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -484,20 +484,20 @@ How UGRID information is stored | Described in detail in `MeshCoords`_. | Stores the following information: - * | :attr:`~iris.experimental.ugrid.MeshCoord.mesh` - | The :class:`~iris.experimental.ugrid.Mesh` associated with this - :class:`~iris.experimental.ugrid.MeshCoord`. This determines the - :attr:`~iris.cube.Cube.mesh` attribute of any :class:`~iris.cube.Cube` - this :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see - `The Basics`_) - - * | :attr:`~iris.experimental.ugrid.MeshCoord.location` - | ``node``/``edge``/``face`` - the element detailed by this - :class:`~iris.experimental.ugrid.MeshCoord`. This determines the - :attr:`~iris.cube.Cube.location` attribute of any - :class:`~iris.cube.Cube` this - :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see - `The Basics`_). + * | :attr:`~iris.experimental.ugrid.MeshCoord.mesh` + | The :class:`~iris.experimental.ugrid.Mesh` associated with this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.mesh` attribute of any :class:`~iris.cube.Cube` + this :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_) + + * | :attr:`~iris.experimental.ugrid.MeshCoord.location` + | ``node``/``edge``/``face`` - the element detailed by this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.location` attribute of any + :class:`~iris.cube.Cube` this + :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_). .. 
_ugrid MeshCoords: diff --git a/docs/src/techpapers/um_files_loading.rst b/docs/src/techpapers/um_files_loading.rst index f8c94cab08..f94898b3aa 100644 --- a/docs/src/techpapers/um_files_loading.rst +++ b/docs/src/techpapers/um_files_loading.rst @@ -125,21 +125,21 @@ with latitude and longitude axes are also supported). For an ordinary latitude-longitude grid, the cubes have coordinates called 'longitude' and 'latitude': - * These are mapped to the appropriate data dimensions. - * They have units of 'degrees'. - * They have a coordinate system of type :class:`iris.coord_systems.GeogCS`. - * The coordinate points are normally set to the regular sequence - ``ZDX/Y + BDX/Y * (1 .. LBNPT/LBROW)`` (*except*, if BDX/BDY is zero, the - values are taken from the extra data vector X/Y, if present). - * If X/Y_LOWER_BOUNDS extra data is available, this appears as bounds values - of the horizontal coordinates. +* These are mapped to the appropriate data dimensions. +* They have units of 'degrees'. +* They have a coordinate system of type :class:`iris.coord_systems.GeogCS`. +* The coordinate points are normally set to the regular sequence + ``ZDX/Y + BDX/Y * (1 .. LBNPT/LBROW)`` (*except*, if BDX/BDY is zero, the + values are taken from the extra data vector X/Y, if present). +* If X/Y_LOWER_BOUNDS extra data is available, this appears as bounds values + of the horizontal coordinates. For **rotated** latitude-longitude coordinates (as for LBCODE=101), the horizontal coordinates differ only slightly -- - * The names are 'grid_latitude' and 'grid_longitude'. - * The coord_system is a :class:`iris.coord_systems.RotatedGeogCS`, created - with a pole defined by BPLAT, BPLON. +* The names are 'grid_latitude' and 'grid_longitude'. +* The coord_system is a :class:`iris.coord_systems.RotatedGeogCS`, created + with a pole defined by BPLAT, BPLON. For example: >>> # Load a PP field. @@ -304,10 +304,9 @@ For hybrid height levels (LBVC=65): multidimensional or non-monotonic. 
See an example printout of a hybrid height cube, -:ref:`here `: - - Notice that this contains all of the above coordinates -- - 'model_level_number', 'sigma', 'level_height' and the derived 'altitude'. +:ref:`here `. Notice that this contains all of the +above coordinates -- ``model_level_number``, ``sigma``, ``level_height`` and +the derived ``altitude``. .. note:: @@ -364,7 +363,7 @@ Data at a single measurement timepoint (LBTIM.IB=0): defined according to LBTIM.IC. Values forecast from T2, valid at T1 (LBTIM.IB=1): - Coordinates ``time` and ``forecast_reference_time`` are created from the T1 + Coordinates ``time`` and ``forecast_reference_time`` are created from the T1 and T2 values, respectively. These have no bounds, and units of 'hours since 1970-01-01 00:00:00', with the appropriate calendar. A ``forecast_period`` coordinate is also created, with values T1-T2, no @@ -383,12 +382,11 @@ these may become dimensions of the resulting data cube. This will depend on the values actually present in the source fields for each of the elements. See an example printout of a forecast data cube, -:ref:`here ` : - - Notice that this example contains all of the above coordinates -- 'time', - 'forecast_period' and 'forecast_reference_time'. In this case the data are - forecasts, so 'time' is a dimension, 'forecast_period' varies with time and - 'forecast_reference_time' is a constant. +:ref:`here `. Notice that this example +contains all of the above coordinates -- ``time``, ``forecast_period`` and +``forecast_reference_time``. In this case the data are forecasts, so ``time`` +is a dimension, ``forecast_period``` varies with time and +``forecast_reference_time`` is a constant. 
Statistical Measures diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst index b4c16b094b..ec3cd8e0dc 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/userguide/navigating_a_cube.rst @@ -191,10 +191,10 @@ Adding and Removing Metadata to the Cube at Load Time Sometimes when loading a cube problems occur when the amount of metadata is more or less than expected. This is often caused by one of the following: - * The file does not contain enough metadata, and therefore the cube cannot know everything about the file. - * Some of the metadata of the file is contained in the filename, but is not part of the actual file. - * There is not enough metadata loaded from the original file as Iris has not handled the format fully. *(in which case, - please let us know about it)* +* The file does not contain enough metadata, and therefore the cube cannot know everything about the file. +* Some of the metadata of the file is contained in the filename, but is not part of the actual file. +* There is not enough metadata loaded from the original file as Iris has not handled the format fully. *(in which case, + please let us know about it)* To solve this, all of :func:`iris.load`, :func:`iris.load_cube`, and :func:`iris.load_cubes` support a callback keyword. diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst index ef4de0c429..e4c041886c 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/userguide/real_and_lazy_data.rst @@ -247,20 +247,21 @@ output file, to be performed by `Dask `_ lat thus enabling parallel save operations. This works in the following way : - 1. an :func:`iris.save` call is made, with a NetCDF file output and the additional - keyword ``compute=False``. - This is currently *only* available when saving to NetCDF, so it is documented in - the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`. - 2. 
the call creates the output file, but does not fill in variables' data, where - the data is a lazy array in the Iris object. Instead, these variables are - initially created "empty". +1. an :func:`iris.save` call is made, with a NetCDF file output and the additional + keyword ``compute=False``. + This is currently *only* available when saving to NetCDF, so it is documented in + the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`. - 3. the :meth:`~iris.save` call returns a ``result`` which is a - :class:`~dask.delayed.Delayed` object. +2. the call creates the output file, but does not fill in variables' data, where + the data is a lazy array in the Iris object. Instead, these variables are + initially created "empty". - 4. the save can be completed later by calling ``result.compute()``, or by passing it - to the :func:`dask.compute` call. +3. the :meth:`~iris.save` call returns a ``result`` which is a + :class:`~dask.delayed.Delayed` object. + +4. the save can be completed later by calling ``result.compute()``, or by passing it + to the :func:`dask.compute` call. The benefit of this, is that costly data transfer operations can be performed in parallel with writes to other data files. 
Also, where array contents are calculated From ce813ffcce63458ffc14f594847418871184e35c Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:29:27 +0000 Subject: [PATCH 079/134] Added xarray phrasebook doc page (#5564) --- docs/src/community/index.rst | 2 + docs/src/community/iris_xarray.rst | 1 + docs/src/community/phrasebook.rst | 66 ++++++++++++++++++++++++++++++ docs/src/whatsnew/latest.rst | 3 ++ 4 files changed, 72 insertions(+) create mode 100644 docs/src/community/phrasebook.rst diff --git a/docs/src/community/index.rst b/docs/src/community/index.rst index 114cb96fe9..ee227513b3 100644 --- a/docs/src/community/index.rst +++ b/docs/src/community/index.rst @@ -40,12 +40,14 @@ smoother interoperability: * The :mod:`iris.pandas` module * :doc:`iris_xarray` +* :doc:`phrasebook` .. toctree:: :maxdepth: 1 :hidden: iris_xarray + phrasebook Plugins ------- diff --git a/docs/src/community/iris_xarray.rst b/docs/src/community/iris_xarray.rst index 2250e3c0a3..9d795fcd9e 100644 --- a/docs/src/community/iris_xarray.rst +++ b/docs/src/community/iris_xarray.rst @@ -7,6 +7,7 @@ Iris ❤️ :term:`Xarray` There is a lot of overlap between Iris and :term:`Xarray`, but some important differences too. Below is a summary of the most important differences, so that you can be prepared, and to help you choose the best package for your use case. +See :doc:`phrasebook` for a broad comparison of terminology. Overall Experience ------------------ diff --git a/docs/src/community/phrasebook.rst b/docs/src/community/phrasebook.rst new file mode 100644 index 0000000000..bcd91cca83 --- /dev/null +++ b/docs/src/community/phrasebook.rst @@ -0,0 +1,66 @@ +.. include:: ../common_links.inc + +.. _phrasebook: + +Package Phrasebook +=================== + +There are a number of similar packages to Iris, and a lot of these have their own +terminology for similar things. 
Whether you're coming or going, we hope this might +be a helpful guide to these differences! +Definitions for each can be found in :ref:`glossary`. See also +`Xarray terminology `_. + +.. list-table:: Phrasebook + :widths: 25 25 25 50 + :header-rows: 1 + + * - Iris + - Xarray + - Example + - Notes + * - Non-Lazy + - Eager + - + - Used to relate to functions, rather than the data. + * - Cube + - DataArray + - + - + * - CubeList + - Dataset + - + - Though similar, a CubeList is a simpler object, and is + not a perfect comparison to a Dataset + * - Merge/ Concatenate + - Concatenate + - `Xarray concatenate `_ + - Xarray's concatenate has the capability to largely do what both + Iris merge and Iris concatenate do. However, this is not a perfect comparison, + please see the link for more information. + * - + - Merge + - `Xarray merge `_ + - Xarray's Merge function doesn't map neatly map to any Iris feature. + Please see the link for more information. + * - Scalar Coordinate + - + - + - Iris makes a distinction between scalar coordinates and non-scalar coordinates, + whereas xarray documentation makes a distinction between scalar and non-scalar *data*. + It is possible to make coordinates with scalar data in both Iris and xarray + but only Iris will label such coordinates. + * - AuxCoord + - Non-Dimensional Coordinate + - + - Coordinates in Iris and xarray are categorised using different rules, + and so are not a one-to-one match. + * - DimCoord + - Dimension Coordinate + - + - Coordinates in Iris and xarray are categorised using different rules, + and so are not a one-to-one match. + +---- + +`To top `_ \ No newline at end of file diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 5aa87f6473..c4be2ab7d2 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -93,6 +93,9 @@ This document explains the changes made to Iris for this release #. `@tkknight`_ removed broken git links. (:pull:`5569`) +#. 
`@ESadek-MO`_ added a phrasebook for synonymous terms used in similar + packages. (:pull:`5564`) + 💼 Internal =========== From 017cec701caa359d02ea39f96c746416bbc6e6ba Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Thu, 9 Nov 2023 16:48:54 +0000 Subject: [PATCH 080/134] Allow `add_season_year()` to optionally send spans backwards (#5573) * New preceding year functionality for add_season_year(). * Modernise test_coord_categorisation. * New test for backwards behaviour. * What's New entry. * Rename to use_year_at_season_start. * Use np.zeros. --- docs/src/whatsnew/latest.rst | 7 + lib/iris/coord_categorisation.py | 77 ++++-- lib/iris/tests/test_coord_categorisation.py | 197 -------------- .../test_coord_categorisation.py | 252 ++++++++++++++++++ 4 files changed, 307 insertions(+), 226 deletions(-) delete mode 100644 lib/iris/tests/test_coord_categorisation.py create mode 100644 lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index c4be2ab7d2..33a4a211d9 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -38,6 +38,13 @@ This document explains the changes made to Iris for this release and :class:`~iris.coord_systems.RotatedMercator` coordinate systems, complete with NetCDF loading and saving. (:pull:`5548`) +#. `@trexfeathers`_ added the ``use_year_at_season_start`` parameter to + :func:`iris.coord_categorisation.add_season_year`. When + ``use_year_at_season_start==True``: seasons spanning the year boundary (e.g. + Winter - December to February) will be assigned to the preceding year (e.g. + the year of December) instead of the following year (the default behaviour). 
+ (:pull:`5573`) + 🐛 Bugs Fixed ============= diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 698b4828f1..b6cc79f253 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -271,20 +271,33 @@ def _validate_seasons(seasons): return -def _month_year_adjusts(seasons): - """Compute the year adjustments required for each month. +def _month_year_adjusts(seasons, use_year_at_season_start=False): + """ + Compute the year adjustments required for each month. - These determine whether the month belongs to a season in the same - year or is in the start of a season that counts towards the next - year. + These adjustments ensure that no season spans two years by assigning months + to the **next** year (use_year_at_season_start is False) or the + **previous** year (use_year_at_season_start is True). E.g. Winter - djf: + either assign Dec to the next year, or Jan and Feb to the previous year. """ - month_year_adjusts = [None, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + # 1 'slot' for each month, with an extra leading 'slot' because months + # are 1-indexed - January is 1, therefore corresponding to the 2nd + # array index. + month_year_adjusts = np.zeros(13, dtype=int) + for season in seasons: - months = _months_in_season(season) - for month in months: - if month > months[-1]: - month_year_adjusts[month] = 1 + months = np.array(_months_in_season(season)) + if use_year_at_season_start: + months_to_shift = months < months[0] + year_shift = -1 + else: + # Sending forwards. 
+ months_to_shift = months > months[-1] + year_shift = 1 + indices_to_shift = months[np.flatnonzero(months_to_shift)] + month_year_adjusts[indices_to_shift] = year_shift + return month_year_adjusts @@ -383,34 +396,40 @@ def _season_number(coord, value): def add_season_year( - cube, coord, name="season_year", seasons=("djf", "mam", "jja", "son") + cube, + coord, + name="season_year", + seasons=("djf", "mam", "jja", "son"), + use_year_at_season_start=False, ): """ - Add a categorical year-of-season coordinate, with user specified - seasons. - - Args: - - * cube (:class:`iris.cube.Cube`): - The cube containing 'coord'. The new coord will be added into - it. - * coord (:class:`iris.coords.Coord` or string): - Coordinate in 'cube', or its name, representing time. - - Kwargs: - - * name (string): - Name of the created coordinate. Defaults to "season_year". - * seasons (:class:`list` of strings): + Add a categorical year-of-season coordinate, with user specified seasons. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + The cube containing `coord`. The new coord will be added into it. + coord : :class:`iris.coords.Coord` or str + Coordinate in `cube`, or its name, representing time. + name : str, default="season_year" + Name of the created coordinate. + seasons : tuple of str, default=("djf", "mam", "jja", "son") List of seasons defined by month abbreviations. Each month must appear once and only once. Defaults to standard meteorological - seasons ('djf', 'mam', 'jja', 'son'). + seasons (``djf``, ``mam``, ``jja``, ``son``). + use_year_at_season_start: bool, default=False + Seasons spanning the year boundary (e.g. Winter ``djf``) will belong + fully to the following year by default (e.g. the year of Jan and Feb). + Set to ``True`` for spanning seasons to belong to the preceding + year (e.g. the year of Dec) instead. """ # Check that the seasons are valid. _validate_seasons(seasons) # Define the adjustments to be made to the year. 
- month_year_adjusts = _month_year_adjusts(seasons) + month_year_adjusts = _month_year_adjusts( + seasons, use_year_at_season_start=use_year_at_season_start + ) # Define a categorisation function. def _season_year(coord, value): diff --git a/lib/iris/tests/test_coord_categorisation.py b/lib/iris/tests/test_coord_categorisation.py deleted file mode 100644 index 0206ba66a5..0000000000 --- a/lib/iris/tests/test_coord_categorisation.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test the coordinate categorisation functions. -""" - -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip - -import warnings - -import cf_units -import numpy as np - -import iris -import iris.coord_categorisation as ccat - -CATEGORISATION_FUNCS = ( - ccat.add_day_of_month, - ccat.add_day_of_year, - ccat.add_weekday, - ccat.add_weekday_fullname, - ccat.add_weekday_number, - ccat.add_month, - ccat.add_month_fullname, - ccat.add_month_number, - ccat.add_year, - ccat.add_season, - ccat.add_season_number, - ccat.add_season_year, - ccat.add_season_membership, -) - - -class TestCategorisations(tests.IrisTest): - def setUp(self): - # make a series of 'day numbers' for the time, that slide across month - # boundaries - day_numbers = np.arange(0, 600, 27, dtype=np.int32) - - cube = iris.cube.Cube( - day_numbers, long_name="test cube", units="metres" - ) - - # use day numbers as data values also (don't actually use this for - # anything) - cube.data = day_numbers - - time_coord = iris.coords.DimCoord( - day_numbers, - standard_name="time", - units=cf_units.Unit("days since epoch", "standard"), - ) - cube.add_dim_coord(time_coord, 0) - - self.cube = cube - self.time_coord = time_coord - - def test_bad_coord(self): - for func in 
CATEGORISATION_FUNCS: - kwargs = {"name": "my_category"} - if func is ccat.add_season_membership: - kwargs["season"] = "djf" - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - func(self.cube, "DOES NOT EXIST", **kwargs) - - def test_explicit_result_names(self): - result_name = "my_category" - fmt = "Missing/incorrectly named result for {0!r}" - for func in CATEGORISATION_FUNCS: - # Specify source coordinate by name - cube = self.cube.copy() - kwargs = {"name": result_name} - if func is ccat.add_season_membership: - kwargs["season"] = "djf" - with warnings.catch_warnings(record=True): - func(cube, "time", **kwargs) - result_coords = cube.coords(result_name) - self.assertEqual(len(result_coords), 1, fmt.format(func.__name__)) - # Specify source coordinate by coordinate reference - cube = self.cube.copy() - time = cube.coord("time") - with warnings.catch_warnings(record=True): - func(cube, time, **kwargs) - result_coords = cube.coords(result_name) - self.assertEqual(len(result_coords), 1, fmt.format(func.__name__)) - - def test_basic(self): - cube = self.cube - time_coord = self.time_coord - - ccat.add_year(cube, time_coord, "my_year") - ccat.add_day_of_month(cube, time_coord, "my_day_of_month") - ccat.add_day_of_year(cube, time_coord, "my_day_of_year") - - ccat.add_month(cube, time_coord, "my_month") - ccat.add_month_fullname(cube, time_coord, "my_month_fullname") - ccat.add_month_number(cube, time_coord, "my_month_number") - - ccat.add_weekday(cube, time_coord, "my_weekday") - ccat.add_weekday_number(cube, time_coord, "my_weekday_number") - ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname") - - ccat.add_season(cube, time_coord, "my_season") - ccat.add_season_number(cube, time_coord, "my_season_number") - ccat.add_season_year(cube, time_coord, "my_season_year") - - # also test 'generic' categorisation interface - def _month_in_quarter(coord, pt_value): - date = coord.units.num2date(pt_value) - return (date.month - 1) % 3 - - 
ccat.add_categorised_coord( - cube, "my_month_in_quarter", time_coord, _month_in_quarter - ) - - # To ensure consistent results between 32-bit and 64-bit - # platforms, ensure all the numeric categorisation coordinates - # are always stored as int64. - for coord in cube.coords(): - if coord.long_name is not None and coord.points.dtype.kind == "i": - coord.points = coord.points.astype(np.int64) - - # check values - self.assertCML(cube, ("categorisation", "quickcheck.cml")) - - def test_add_season_nonstandard(self): - # season categorisations work for non-standard seasons? - cube = self.cube - time_coord = self.time_coord - seasons = ["djfm", "amjj", "ason"] - ccat.add_season(cube, time_coord, name="seasons", seasons=seasons) - ccat.add_season_number( - cube, time_coord, name="season_numbers", seasons=seasons - ) - ccat.add_season_year( - cube, time_coord, name="season_years", seasons=seasons - ) - self.assertCML(cube, ("categorisation", "customcheck.cml")) - - def test_add_season_membership(self): - # season membership identifies correct seasons? - season = "djf" - ccat.add_season_membership(self.cube, "time", season, name="in_season") - ccat.add_season(self.cube, "time") - coord_season = self.cube.coord("season") - coord_membership = self.cube.coord("in_season") - season_locations = np.where(coord_season.points == season)[0] - membership_locations = np.where(coord_membership.points)[0] - self.assertArrayEqual(membership_locations, season_locations) - - def test_add_season_invalid_spec(self): - # custom seasons with an invalid season raises an error? - seasons = ("djf", "maj", "jja", "son") # MAJ not a season! - for func in ( - ccat.add_season, - ccat.add_season_year, - ccat.add_season_number, - ): - with self.assertRaises(ValueError): - func(self.cube, "time", name="my_category", seasons=seasons) - - def test_add_season_repeated_months(self): - # custom seasons with repeated months raises an error? 
- seasons = ("djfm", "mam", "jja", "son") - for func in ( - ccat.add_season, - ccat.add_season_year, - ccat.add_season_number, - ): - with self.assertRaises(ValueError): - func(self.cube, "time", name="my_category", seasons=seasons) - - def test_add_season_missing_months(self): - # custom seasons with missing months raises an error? - seasons = ("djfm", "amjj") - for func in ( - ccat.add_season, - ccat.add_season_year, - ccat.add_season_number, - ): - with self.assertRaises(ValueError): - func(self.cube, "time", name="my_category", seasons=seasons) - - def test_add_season_membership_invalid_spec(self): - season = "maj" # not a season! - with self.assertRaises(ValueError): - ccat.add_season_membership( - self.cube, "time", season, name="maj_season" - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py new file mode 100644 index 0000000000..fbc3514147 --- /dev/null +++ b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py @@ -0,0 +1,252 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Test the coordinate categorisation functions. 
+""" + +import warnings + +import cf_units +import numpy as np +import pytest + +import iris +import iris.coord_categorisation as ccat +import iris.coords +import iris.cube +import iris.exceptions +from iris.tests import IrisTest + + +@pytest.fixture( + scope="module", + params=( + ccat.add_day_of_month, + ccat.add_day_of_year, + ccat.add_weekday, + ccat.add_weekday_fullname, + ccat.add_weekday_number, + ccat.add_month, + ccat.add_month_fullname, + ccat.add_month_number, + ccat.add_year, + ccat.add_season, + ccat.add_season_number, + ccat.add_season_year, + ccat.add_season_membership, + ), +) +def categorisation_func(request): + return request.param + + +@pytest.fixture( + scope="module", + params=( + ccat.add_season, + ccat.add_season_number, + ccat.add_season_year, + ), +) +def season_cat_func(request): + return request.param + + +@pytest.fixture(scope="module") +def day_numbers(): + # make a series of 'day numbers' for the time, that slide across month + # boundaries + return np.arange(0, 600, 27, dtype=np.int32) + + +@pytest.fixture +def time_coord(day_numbers): + return iris.coords.DimCoord( + day_numbers, + standard_name="time", + units=cf_units.Unit("days since epoch", "standard"), + ) + + +@pytest.fixture +def cube(day_numbers, time_coord): + _cube = iris.cube.Cube(day_numbers, long_name="test cube", units="metres") + # use day numbers as data values also (don't actually use this for + # anything) + _cube.data = day_numbers + _cube.add_dim_coord(time_coord, 0) + return _cube + + +def test_bad_coord(cube, categorisation_func): + kwargs = {"name": "my_category"} + if categorisation_func is ccat.add_season_membership: + kwargs["season"] = "djf" + with pytest.raises(iris.exceptions.CoordinateNotFoundError): + categorisation_func(cube, "DOES NOT EXIST", **kwargs) + + +def test_explicit_result_names(cube, categorisation_func): + result_name = "my_category" + fmt = "Missing/incorrectly named result for {0!r}" + # Specify source coordinate by name + new_cube = 
cube.copy() + kwargs = {"name": result_name} + if categorisation_func is ccat.add_season_membership: + kwargs["season"] = "djf" + with warnings.catch_warnings(record=True): + categorisation_func(new_cube, "time", **kwargs) + result_coords = new_cube.coords(result_name) + assert len(result_coords) == 1, fmt.format(categorisation_func.__name__) + # Specify source coordinate by coordinate reference + new_cube = cube.copy() + time = new_cube.coord("time") + with warnings.catch_warnings(record=True): + categorisation_func(new_cube, time, **kwargs) + result_coords = new_cube.coords(result_name) + assert len(result_coords) == 1, fmt.format(categorisation_func.__name__) + + +def test_basic(cube, time_coord): + ccat.add_year(cube, time_coord, "my_year") + ccat.add_day_of_month(cube, time_coord, "my_day_of_month") + ccat.add_day_of_year(cube, time_coord, "my_day_of_year") + + ccat.add_month(cube, time_coord, "my_month") + ccat.add_month_fullname(cube, time_coord, "my_month_fullname") + ccat.add_month_number(cube, time_coord, "my_month_number") + + ccat.add_weekday(cube, time_coord, "my_weekday") + ccat.add_weekday_number(cube, time_coord, "my_weekday_number") + ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname") + + ccat.add_season(cube, time_coord, "my_season") + ccat.add_season_number(cube, time_coord, "my_season_number") + ccat.add_season_year(cube, time_coord, "my_season_year") + + # also test 'generic' categorisation interface + def _month_in_quarter(coord, pt_value): + date = coord.units.num2date(pt_value) + return (date.month - 1) % 3 + + ccat.add_categorised_coord( + cube, "my_month_in_quarter", time_coord, _month_in_quarter + ) + + # To ensure consistent results between 32-bit and 64-bit + # platforms, ensure all the numeric categorisation coordinates + # are always stored as int64. 
+ for coord in cube.coords(): + if coord.long_name is not None and coord.points.dtype.kind == "i": + coord.points = coord.points.astype(np.int64) + + # check values + IrisTest.assertCML(IrisTest(), cube, ("categorisation", "quickcheck.cml")) + + +def test_add_season_nonstandard(cube, time_coord): + # season categorisations work for non-standard seasons? + seasons = ["djfm", "amjj", "ason"] + ccat.add_season(cube, time_coord, name="seasons", seasons=seasons) + ccat.add_season_number( + cube, time_coord, name="season_numbers", seasons=seasons + ) + ccat.add_season_year( + cube, time_coord, name="season_years", seasons=seasons + ) + IrisTest.assertCML(IrisTest(), cube, ("categorisation", "customcheck.cml")) + + +@pytest.mark.parametrize("backwards", [None, False, True]) +@pytest.mark.parametrize( + "nonstandard", + [False, True], + ids=["standard_seasons", "nonstandard_seasons"], +) +def test_add_season_year(cube, time_coord, backwards, nonstandard): + """Specific test to account for the extra use_year_at_season_start argument.""" + + kwargs = dict( + cube=cube, + coord=time_coord, + name="season_years", + use_year_at_season_start=backwards, + ) + if nonstandard: + kwargs["seasons"] = ["ndjfm", "amjj", "aso"] + + # Based on the actual years of each date. + expected_years = np.array(([1970] * 14) + ([1971] * 9)) + # Subset to just the 'season' of interest. + season_slice = np.s_[12:17] + expected_years = expected_years[season_slice] + + # Single indices to examine to test the handling of specific months. + nov = 0 + dec = 1 + jan = 2 + feb = 3 + mar = 4 + + # Set the expected deviations from the actual date years. + if backwards is True: + expected_years[jan] = 1970 + expected_years[feb] = 1970 + if nonstandard: + expected_years[mar] = 1970 + else: + # Either False or None - False being the default behaviour. 
+ expected_years[dec] = 1971 + if nonstandard: + expected_years[nov] = 1971 + + ccat.add_season_year(**kwargs) + actual_years = cube.coord(kwargs["name"]).points + # Subset to just the 'season' of interest. + actual_years = actual_years[season_slice] + + np.testing.assert_array_almost_equal(actual_years, expected_years) + + +def test_add_season_membership(cube): + # season membership identifies correct seasons? + season = "djf" + ccat.add_season_membership(cube, "time", season, name="in_season") + ccat.add_season(cube, "time") + coord_season = cube.coord("season") + coord_membership = cube.coord("in_season") + season_locations = np.where(coord_season.points == season)[0] + membership_locations = np.where(coord_membership.points)[0] + np.testing.assert_array_almost_equal( + membership_locations, season_locations + ) + + +def test_add_season_invalid_spec(cube, season_cat_func): + # custom seasons with an invalid season raises an error? + seasons = ("djf", "maj", "jja", "son") # MAJ not a season! + with pytest.raises(ValueError): + season_cat_func(cube, "time", name="my_category", seasons=seasons) + + +def test_add_season_repeated_months(cube, season_cat_func): + # custom seasons with repeated months raises an error? + seasons = ("djfm", "mam", "jja", "son") + with pytest.raises(ValueError): + season_cat_func(cube, "time", name="my_category", seasons=seasons) + + +def test_add_season_missing_months(cube, season_cat_func): + # custom seasons with missing months raises an error? + seasons = ("djfm", "amjj") + with pytest.raises(ValueError): + season_cat_func(cube, "time", name="my_category", seasons=seasons) + + +def test_add_season_membership_invalid_spec(cube): + season = "maj" # not a season! 
+ with pytest.raises(ValueError): + ccat.add_season_membership(cube, "time", season, name="maj_season") From c82ec9caafacf7a04999218fabbdf90fa6fa0949 Mon Sep 17 00:00:00 2001 From: lbdreyer Date: Mon, 13 Nov 2023 17:24:26 +0000 Subject: [PATCH 081/134] Relicense to from LGPL-3 to BSD-3 (#5577) * Relicense to BSD-3 * Fix docs test line numbers * Add whats new * Update docs/src/whatsnew/latest.rst --------- Co-authored-by: Bill Little --- COPYING | 674 ------------------ COPYING.LESSER | 165 ----- LICENSE | 29 + MANIFEST.in | 3 +- README.md | 2 +- benchmarks/asv_delegated_conda.py | 5 +- benchmarks/benchmarks/__init__.py | 5 +- benchmarks/benchmarks/aux_factory.py | 5 +- benchmarks/benchmarks/coords.py | 5 +- benchmarks/benchmarks/cperf/__init__.py | 5 +- benchmarks/benchmarks/cperf/equality.py | 5 +- benchmarks/benchmarks/cperf/load.py | 5 +- benchmarks/benchmarks/cperf/save.py | 5 +- benchmarks/benchmarks/cube.py | 5 +- .../benchmarks/experimental/__init__.py | 5 +- .../benchmarks/experimental/ugrid/__init__.py | 5 +- .../experimental/ugrid/regions_combine.py | 5 +- .../benchmarks/generate_data/__init__.py | 5 +- benchmarks/benchmarks/generate_data/stock.py | 5 +- benchmarks/benchmarks/generate_data/ugrid.py | 5 +- .../benchmarks/generate_data/um_files.py | 5 +- benchmarks/benchmarks/import_iris.py | 5 +- benchmarks/benchmarks/iterate.py | 5 +- benchmarks/benchmarks/load/__init__.py | 5 +- benchmarks/benchmarks/load/ugrid.py | 5 +- .../benchmarks/metadata_manager_factory.py | 5 +- benchmarks/benchmarks/mixin.py | 5 +- benchmarks/benchmarks/plot.py | 5 +- benchmarks/benchmarks/regridding.py | 5 +- benchmarks/benchmarks/save.py | 5 +- benchmarks/benchmarks/sperf/__init__.py | 5 +- .../benchmarks/sperf/combine_regions.py | 5 +- benchmarks/benchmarks/sperf/equality.py | 5 +- benchmarks/benchmarks/sperf/load.py | 5 +- benchmarks/benchmarks/sperf/save.py | 5 +- benchmarks/benchmarks/trajectory.py | 5 +- benchmarks/bm_runner.py | 5 +- docs/gallery_tests/__init__.py | 5 +- 
docs/gallery_tests/conftest.py | 5 +- docs/gallery_tests/test_gallery_examples.py | 5 +- docs/src/conf.py | 5 +- docs/src/copyright.rst | 9 +- .../src/further_topics/filtering_warnings.rst | 16 +- docs/src/sphinxext/api_rst_formatting.py | 5 +- docs/src/whatsnew/latest.rst | 2 +- lib/iris/__init__.py | 5 +- lib/iris/_concatenate.py | 5 +- lib/iris/_constraints.py | 5 +- lib/iris/_data_manager.py | 5 +- lib/iris/_deprecation.py | 5 +- lib/iris/_lazy_data.py | 5 +- lib/iris/_merge.py | 5 +- lib/iris/_representation/__init__.py | 5 +- lib/iris/_representation/cube_printout.py | 5 +- lib/iris/_representation/cube_summary.py | 5 +- lib/iris/analysis/__init__.py | 5 +- lib/iris/analysis/_area_weighted.py | 5 +- lib/iris/analysis/_grid_angles.py | 5 +- lib/iris/analysis/_interpolation.py | 5 +- lib/iris/analysis/_regrid.py | 5 +- lib/iris/analysis/calculus.py | 5 +- lib/iris/analysis/cartography.py | 5 +- lib/iris/analysis/geometry.py | 5 +- lib/iris/analysis/maths.py | 5 +- lib/iris/analysis/stats.py | 5 +- lib/iris/analysis/trajectory.py | 5 +- lib/iris/aux_factory.py | 5 +- lib/iris/common/__init__.py | 5 +- lib/iris/common/lenient.py | 5 +- lib/iris/common/metadata.py | 5 +- lib/iris/common/mixin.py | 5 +- lib/iris/common/resolve.py | 5 +- lib/iris/config.py | 5 +- lib/iris/coord_categorisation.py | 5 +- lib/iris/coord_systems.py | 5 +- lib/iris/coords.py | 5 +- lib/iris/cube.py | 5 +- lib/iris/exceptions.py | 5 +- lib/iris/experimental/__init__.py | 5 +- lib/iris/experimental/animate.py | 5 +- lib/iris/experimental/raster.py | 5 +- lib/iris/experimental/regrid.py | 5 +- lib/iris/experimental/regrid_conservative.py | 5 +- lib/iris/experimental/representation.py | 5 +- lib/iris/experimental/stratify.py | 5 +- lib/iris/experimental/ugrid/__init__.py | 5 +- lib/iris/experimental/ugrid/cf.py | 5 +- lib/iris/experimental/ugrid/load.py | 5 +- lib/iris/experimental/ugrid/mesh.py | 5 +- lib/iris/experimental/ugrid/metadata.py | 5 +- lib/iris/experimental/ugrid/save.py | 5 +- 
lib/iris/experimental/ugrid/utils.py | 5 +- lib/iris/fileformats/__init__.py | 5 +- lib/iris/fileformats/_ff.py | 5 +- lib/iris/fileformats/_ff_cross_references.py | 5 +- .../fileformats/_nc_load_rules/__init__.py | 5 +- .../fileformats/_nc_load_rules/actions.py | 5 +- lib/iris/fileformats/_nc_load_rules/engine.py | 5 +- .../fileformats/_nc_load_rules/helpers.py | 5 +- lib/iris/fileformats/_pp_lbproc_pairs.py | 5 +- .../_structured_array_identification.py | 5 +- lib/iris/fileformats/abf.py | 5 +- lib/iris/fileformats/cf.py | 5 +- lib/iris/fileformats/dot.py | 5 +- lib/iris/fileformats/name.py | 5 +- lib/iris/fileformats/name_loaders.py | 5 +- lib/iris/fileformats/netcdf/__init__.py | 5 +- lib/iris/fileformats/netcdf/_dask_locks.py | 5 +- .../fileformats/netcdf/_thread_safe_nc.py | 5 +- lib/iris/fileformats/netcdf/loader.py | 5 +- lib/iris/fileformats/netcdf/saver.py | 5 +- lib/iris/fileformats/nimrod.py | 5 +- lib/iris/fileformats/nimrod_load_rules.py | 5 +- lib/iris/fileformats/pp.py | 5 +- lib/iris/fileformats/pp_load_rules.py | 5 +- lib/iris/fileformats/pp_save_rules.py | 5 +- lib/iris/fileformats/rules.py | 5 +- lib/iris/fileformats/um/__init__.py | 5 +- lib/iris/fileformats/um/_fast_load.py | 5 +- .../um/_fast_load_structured_fields.py | 5 +- lib/iris/fileformats/um/_ff_replacement.py | 5 +- .../um/_optimal_array_structuring.py | 5 +- lib/iris/fileformats/um_cf_map.py | 5 +- lib/iris/io/__init__.py | 5 +- lib/iris/io/format_picker.py | 5 +- lib/iris/iterate.py | 5 +- lib/iris/palette.py | 5 +- lib/iris/pandas.py | 5 +- lib/iris/plot.py | 5 +- lib/iris/quickplot.py | 5 +- lib/iris/symbols.py | 5 +- lib/iris/tests/__init__.py | 5 +- lib/iris/tests/experimental/__init__.py | 5 +- .../tests/experimental/regrid/__init__.py | 5 +- ..._area_weighted_rectilinear_src_and_grid.py | 5 +- .../test_regrid_conservative_via_esmpy.py | 5 +- lib/iris/tests/experimental/test_raster.py | 5 +- lib/iris/tests/graphics/__init__.py | 5 +- lib/iris/tests/graphics/idiff.py | 5 +- 
lib/iris/tests/graphics/recreate_imagerepo.py | 5 +- lib/iris/tests/integration/__init__.py | 5 +- .../tests/integration/analysis/__init__.py | 5 +- .../analysis/test_area_weighted.py | 5 +- .../tests/integration/aux_factory/__init__.py | 5 +- .../aux_factory/test_OceanSigmaZFactory.py | 5 +- .../tests/integration/concatenate/__init__.py | 5 +- .../concatenate/test_concatenate.py | 5 +- .../integration/experimental/__init__.py | 5 +- .../experimental/test_CubeRepresentation.py | 5 +- .../test_regrid_ProjectedUnstructured.py | 5 +- .../experimental/test_ugrid_load.py | 5 +- .../experimental/test_ugrid_save.py | 5 +- .../tests/integration/fast_load/__init__.py | 5 +- .../integration/fast_load/test_fast_load.py | 5 +- lib/iris/tests/integration/merge/__init__.py | 5 +- .../tests/integration/merge/test_merge.py | 5 +- lib/iris/tests/integration/netcdf/__init__.py | 5 +- .../integration/netcdf/test__dask_locks.py | 5 +- .../integration/netcdf/test_attributes.py | 5 +- .../integration/netcdf/test_aux_factories.py | 5 +- .../integration/netcdf/test_coord_systems.py | 5 +- .../integration/netcdf/test_delayed_save.py | 5 +- .../tests/integration/netcdf/test_general.py | 5 +- .../netcdf/test_self_referencing.py | 5 +- .../integration/netcdf/test_thread_safety.py | 5 +- lib/iris/tests/integration/plot/__init__.py | 5 +- .../tests/integration/plot/test_animate.py | 5 +- .../tests/integration/plot/test_colorbar.py | 5 +- .../tests/integration/plot/test_netcdftime.py | 5 +- .../tests/integration/plot/test_nzdateline.py | 5 +- .../integration/plot/test_plot_2d_coords.py | 5 +- .../integration/plot/test_vector_plots.py | 5 +- lib/iris/tests/integration/test_Datums.py | 5 +- .../tests/integration/test_PartialDateTime.py | 5 +- .../tests/integration/test_climatology.py | 5 +- lib/iris/tests/integration/test_cube.py | 5 +- lib/iris/tests/integration/test_ff.py | 5 +- lib/iris/tests/integration/test_new_axis.py | 5 +- lib/iris/tests/integration/test_pickle.py | 5 +- 
lib/iris/tests/integration/test_pp.py | 5 +- .../test_pp_constrained_load_cubes.py | 5 +- .../integration/test_regrid_equivalence.py | 5 +- lib/iris/tests/integration/test_regridding.py | 5 +- lib/iris/tests/integration/test_subset.py | 5 +- lib/iris/tests/integration/test_trajectory.py | 5 +- lib/iris/tests/integration/um/__init__.py | 5 +- .../tests/integration/um/test_fieldsfile.py | 5 +- lib/iris/tests/pp.py | 5 +- lib/iris/tests/stock/__init__.py | 5 +- lib/iris/tests/stock/_stock_2d_latlons.py | 5 +- lib/iris/tests/stock/mesh.py | 5 +- lib/iris/tests/stock/netcdf.py | 5 +- lib/iris/tests/system_test.py | 5 +- lib/iris/tests/test_abf.py | 5 +- lib/iris/tests/test_aggregate_by.py | 5 +- lib/iris/tests/test_analysis.py | 5 +- lib/iris/tests/test_analysis_calculus.py | 5 +- lib/iris/tests/test_basic_maths.py | 5 +- lib/iris/tests/test_cartography.py | 5 +- lib/iris/tests/test_cdm.py | 5 +- lib/iris/tests/test_cell.py | 5 +- lib/iris/tests/test_cf.py | 5 +- lib/iris/tests/test_coding_standards.py | 10 +- lib/iris/tests/test_concatenate.py | 5 +- lib/iris/tests/test_constraints.py | 5 +- lib/iris/tests/test_coord_api.py | 5 +- lib/iris/tests/test_coordsystem.py | 5 +- lib/iris/tests/test_cube.py | 5 +- lib/iris/tests/test_cube_to_pp.py | 5 +- lib/iris/tests/test_ff.py | 5 +- lib/iris/tests/test_file_load.py | 5 +- lib/iris/tests/test_file_save.py | 5 +- lib/iris/tests/test_hybrid.py | 5 +- lib/iris/tests/test_image_json.py | 5 +- lib/iris/tests/test_imports.py | 5 +- lib/iris/tests/test_intersect.py | 5 +- lib/iris/tests/test_io_init.py | 5 +- lib/iris/tests/test_iterate.py | 5 +- lib/iris/tests/test_lazy_aggregate_by.py | 5 +- lib/iris/tests/test_load.py | 5 +- lib/iris/tests/test_mapping.py | 5 +- lib/iris/tests/test_merge.py | 5 +- lib/iris/tests/test_name.py | 5 +- lib/iris/tests/test_netcdf.py | 5 +- lib/iris/tests/test_nimrod.py | 5 +- lib/iris/tests/test_peak.py | 5 +- lib/iris/tests/test_pickling.py | 5 +- lib/iris/tests/test_plot.py | 5 +- 
lib/iris/tests/test_pp_cf.py | 5 +- lib/iris/tests/test_pp_module.py | 5 +- lib/iris/tests/test_pp_stash.py | 5 +- lib/iris/tests/test_pp_to_cube.py | 5 +- lib/iris/tests/test_quickplot.py | 5 +- lib/iris/tests/test_std_names.py | 5 +- lib/iris/tests/test_uri_callback.py | 5 +- lib/iris/tests/test_util.py | 5 +- lib/iris/tests/unit/__init__.py | 5 +- lib/iris/tests/unit/analysis/__init__.py | 5 +- .../unit/analysis/area_weighted/__init__.py | 5 +- .../test_AreaWeightedRegridder.py | 5 +- .../unit/analysis/cartography/__init__.py | 5 +- .../cartography/test__get_lon_lat_coords.py | 5 +- .../cartography/test__quadrant_area.py | 5 +- .../analysis/cartography/test__xy_range.py | 5 +- .../analysis/cartography/test_area_weights.py | 5 +- .../cartography/test_gridcell_angles.py | 5 +- .../unit/analysis/cartography/test_project.py | 5 +- .../cartography/test_rotate_grid_vectors.py | 5 +- .../analysis/cartography/test_rotate_winds.py | 5 +- .../tests/unit/analysis/geometry/__init__.py | 5 +- .../test__extract_relevant_cube_slice.py | 5 +- .../geometry/test_geometry_area_weights.py | 5 +- .../unit/analysis/interpolation/__init__.py | 5 +- .../test_RectilinearInterpolator.py | 5 +- .../interpolation/test_get_xy_dim_coords.py | 5 +- .../tests/unit/analysis/maths/__init__.py | 5 +- .../analysis/maths/test__arith__dask_array.py | 5 +- .../maths/test__arith__derived_coords.py | 5 +- .../analysis/maths/test__arith__meshcoords.py | 5 +- .../unit/analysis/maths/test__get_dtype.py | 5 +- .../maths/test__inplace_common_checks.py | 5 +- .../unit/analysis/maths/test__output_dtype.py | 5 +- .../tests/unit/analysis/maths/test_add.py | 5 +- .../tests/unit/analysis/maths/test_divide.py | 5 +- .../unit/analysis/maths/test_multiply.py | 5 +- .../unit/analysis/maths/test_subtract.py | 5 +- .../tests/unit/analysis/regrid/__init__.py | 5 +- .../regrid/test_RectilinearRegridder.py | 5 +- .../regrid/test__CurvilinearRegridder.py | 5 +- .../analysis/scipy_interpolate/__init__.py | 5 +- 
.../test__RegularGridInterpolator.py | 5 +- .../tests/unit/analysis/stats/__init__.py | 5 +- .../unit/analysis/stats/test_pearsonr.py | 5 +- .../tests/unit/analysis/test_Aggregator.py | 5 +- .../tests/unit/analysis/test_AreaWeighted.py | 5 +- lib/iris/tests/unit/analysis/test_COUNT.py | 5 +- lib/iris/tests/unit/analysis/test_Linear.py | 5 +- lib/iris/tests/unit/analysis/test_MAX.py | 5 +- lib/iris/tests/unit/analysis/test_MAX_RUN.py | 5 +- lib/iris/tests/unit/analysis/test_MEAN.py | 5 +- lib/iris/tests/unit/analysis/test_MIN.py | 5 +- lib/iris/tests/unit/analysis/test_Nearest.py | 5 +- .../tests/unit/analysis/test_PERCENTILE.py | 5 +- .../tests/unit/analysis/test_PROPORTION.py | 5 +- .../analysis/test_PercentileAggregator.py | 5 +- .../tests/unit/analysis/test_PointInCell.py | 5 +- lib/iris/tests/unit/analysis/test_RMS.py | 5 +- lib/iris/tests/unit/analysis/test_STD_DEV.py | 5 +- lib/iris/tests/unit/analysis/test_SUM.py | 5 +- lib/iris/tests/unit/analysis/test_VARIANCE.py | 5 +- .../tests/unit/analysis/test_WPERCENTILE.py | 5 +- .../test_WeightedPercentileAggregator.py | 5 +- .../analysis/test__axis_to_single_trailing.py | 5 +- .../unit/analysis/trajectory/__init__.py | 5 +- .../analysis/trajectory/test_Trajectory.py | 5 +- ...t_UnstructuredNearestNeighbourRegridder.py | 5 +- ...est__nearest_neighbour_indices_ndcoords.py | 5 +- .../analysis/trajectory/test_interpolate.py | 5 +- lib/iris/tests/unit/aux_factory/__init__.py | 5 +- .../test_AtmosphereSigmaFactory.py | 5 +- .../unit/aux_factory/test_AuxCoordFactory.py | 5 +- .../aux_factory/test_HybridPressureFactory.py | 5 +- .../unit/aux_factory/test_OceanSFactory.py | 5 +- .../unit/aux_factory/test_OceanSg1Factory.py | 5 +- .../unit/aux_factory/test_OceanSg2Factory.py | 5 +- .../aux_factory/test_OceanSigmaFactory.py | 5 +- .../aux_factory/test_OceanSigmaZFactory.py | 5 +- lib/iris/tests/unit/common/__init__.py | 5 +- .../tests/unit/common/lenient/__init__.py | 5 +- .../tests/unit/common/lenient/test_Lenient.py | 5 +- 
.../unit/common/lenient/test__Lenient.py | 5 +- .../common/lenient/test__lenient_client.py | 5 +- .../common/lenient/test__lenient_service.py | 5 +- .../unit/common/lenient/test__qualname.py | 5 +- .../tests/unit/common/metadata/__init__.py | 5 +- .../test_AncillaryVariableMetadata.py | 5 +- .../unit/common/metadata/test_BaseMetadata.py | 5 +- .../metadata/test_CellMeasureMetadata.py | 5 +- .../common/metadata/test_CoordMetadata.py | 5 +- .../unit/common/metadata/test_CubeMetadata.py | 5 +- .../common/metadata/test__NamedTupleMeta.py | 5 +- .../unit/common/metadata/test_hexdigest.py | 5 +- .../common/metadata/test_metadata_filter.py | 5 +- .../metadata/test_metadata_manager_factory.py | 5 +- lib/iris/tests/unit/common/mixin/__init__.py | 5 +- .../unit/common/mixin/test_CFVariableMixin.py | 5 +- .../common/mixin/test_LimitedAttributeDict.py | 5 +- .../mixin/test__get_valid_standard_name.py | 5 +- .../tests/unit/common/resolve/__init__.py | 5 +- .../tests/unit/common/resolve/test_Resolve.py | 5 +- lib/iris/tests/unit/concatenate/__init__.py | 5 +- .../unit/concatenate/test__CoordMetaData.py | 5 +- .../unit/concatenate/test__CoordSignature.py | 5 +- .../unit/concatenate/test__CubeSignature.py | 5 +- .../unit/concatenate/test_concatenate.py | 5 +- lib/iris/tests/unit/config/__init__.py | 5 +- lib/iris/tests/unit/config/test_NetCDF.py | 5 +- lib/iris/tests/unit/constraints/__init__.py | 5 +- .../constraints/test_Constraint_equality.py | 5 +- .../unit/constraints/test_NameConstraint.py | 5 +- .../unit/coord_categorisation/__init__.py | 5 +- .../test_add_categorised_coord.py | 5 +- .../coord_categorisation/test_add_hour.py | 5 +- .../test_coord_categorisation.py | 5 +- lib/iris/tests/unit/coord_systems/__init__.py | 5 +- .../coord_systems/test_AlbersEqualArea.py | 5 +- .../tests/unit/coord_systems/test_GeogCS.py | 5 +- .../unit/coord_systems/test_Geostationary.py | 5 +- .../test_LambertAzimuthalEqualArea.py | 5 +- .../coord_systems/test_LambertConformal.py | 5 +- 
.../tests/unit/coord_systems/test_Mercator.py | 5 +- .../coord_systems/test_ObliqueMercator.py | 5 +- .../unit/coord_systems/test_Orthographic.py | 5 +- .../coord_systems/test_PolarStereographic.py | 5 +- .../coord_systems/test_RotatedMercator.py | 5 +- .../unit/coord_systems/test_RotatedPole.py | 5 +- .../unit/coord_systems/test_Stereographic.py | 5 +- .../coord_systems/test_TransverseMercator.py | 5 +- .../coord_systems/test_VerticalPerspective.py | 5 +- lib/iris/tests/unit/coords/__init__.py | 5 +- .../unit/coords/test_AncillaryVariable.py | 5 +- lib/iris/tests/unit/coords/test_AuxCoord.py | 5 +- lib/iris/tests/unit/coords/test_Cell.py | 5 +- .../tests/unit/coords/test_CellMeasure.py | 5 +- lib/iris/tests/unit/coords/test_CellMethod.py | 5 +- lib/iris/tests/unit/coords/test_Coord.py | 5 +- lib/iris/tests/unit/coords/test_DimCoord.py | 5 +- .../unit/coords/test__DimensionalMetadata.py | 5 +- lib/iris/tests/unit/cube/__init__.py | 5 +- lib/iris/tests/unit/cube/test_Cube.py | 5 +- lib/iris/tests/unit/cube/test_CubeList.py | 5 +- .../unit/cube/test_Cube__aggregated_by.py | 5 +- .../tests/unit/cube/test_Cube__operators.py | 5 +- lib/iris/tests/unit/data_manager/__init__.py | 5 +- .../unit/data_manager/test_DataManager.py | 5 +- lib/iris/tests/unit/experimental/__init__.py | 5 +- .../unit/experimental/raster/__init__.py | 5 +- .../raster/test_export_geotiff.py | 5 +- .../unit/experimental/regrid/__init__.py | 5 +- ..._area_weighted_rectilinear_src_and_grid.py | 5 +- ...rid_weighted_curvilinear_to_rectilinear.py | 5 +- .../experimental/representation/__init__.py | 5 +- .../test_CubeListRepresentation.py | 5 +- .../representation/test_CubeRepresentation.py | 5 +- .../unit/experimental/stratify/__init__.py | 5 +- .../experimental/stratify/test_relevel.py | 5 +- .../tests/unit/experimental/ugrid/__init__.py | 5 +- .../unit/experimental/ugrid/cf/__init__.py | 5 +- ...test_CFUGridAuxiliaryCoordinateVariable.py | 5 +- .../cf/test_CFUGridConnectivityVariable.py | 5 +- 
.../ugrid/cf/test_CFUGridGroup.py | 5 +- .../ugrid/cf/test_CFUGridMeshVariable.py | 5 +- .../ugrid/cf/test_CFUGridReader.py | 5 +- .../unit/experimental/ugrid/load/__init__.py | 5 +- .../ugrid/load/test_ParseUgridOnLoad.py | 5 +- .../experimental/ugrid/load/test_load_mesh.py | 5 +- .../ugrid/load/test_load_meshes.py | 5 +- .../unit/experimental/ugrid/mesh/__init__.py | 5 +- .../ugrid/mesh/test_Connectivity.py | 5 +- .../unit/experimental/ugrid/mesh/test_Mesh.py | 5 +- .../experimental/ugrid/mesh/test_MeshCoord.py | 5 +- .../ugrid/mesh/test_Mesh__from_coords.py | 5 +- .../experimental/ugrid/metadata/__init__.py | 5 +- .../metadata/test_ConnectivityMetadata.py | 5 +- .../ugrid/metadata/test_MeshCoordMetadata.py | 5 +- .../ugrid/metadata/test_MeshMetadata.py | 5 +- .../unit/experimental/ugrid/utils/__init__.py | 5 +- .../ugrid/utils/test_recombine_submeshes.py | 5 +- lib/iris/tests/unit/fileformats/__init__.py | 5 +- .../tests/unit/fileformats/abf/__init__.py | 5 +- .../unit/fileformats/abf/test_ABFField.py | 5 +- .../tests/unit/fileformats/cf/__init__.py | 5 +- .../tests/unit/fileformats/cf/test_CFGroup.py | 5 +- .../unit/fileformats/cf/test_CFReader.py | 5 +- .../tests/unit/fileformats/dot/__init__.py | 5 +- .../unit/fileformats/dot/test__dot_path.py | 5 +- .../tests/unit/fileformats/ff/__init__.py | 5 +- .../unit/fileformats/ff/test_ArakawaC.py | 5 +- .../tests/unit/fileformats/ff/test_ENDGame.py | 5 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 5 +- .../unit/fileformats/ff/test_FFHeader.py | 5 +- .../tests/unit/fileformats/ff/test_Grid.py | 5 +- .../unit/fileformats/ff/test_NewDynamics.py | 5 +- .../unit/fileformats/name_loaders/__init__.py | 5 +- .../name_loaders/test__build_cell_methods.py | 5 +- ...test__build_lat_lon_for_NAME_timeseries.py | 5 +- .../test__calc_integration_period.py | 5 +- .../name_loaders/test__cf_height_from_name.py | 5 +- .../name_loaders/test__generate_cubes.py | 5 +- .../fileformats/nc_load_rules/__init__.py | 5 +- 
.../nc_load_rules/actions/__init__.py | 5 +- .../actions/test__grid_mappings.py | 5 +- .../actions/test__hybrid_formulae.py | 5 +- .../actions/test__latlon_dimcoords.py | 5 +- .../actions/test__miscellaneous.py | 5 +- .../actions/test__time_coords.py | 5 +- .../nc_load_rules/engine/__init__.py | 5 +- .../nc_load_rules/engine/test_engine.py | 5 +- .../nc_load_rules/helpers/__init__.py | 5 +- ...ild_albers_equal_area_coordinate_system.py | 5 +- .../helpers/test_build_ancil_var.py | 5 +- .../test_build_auxiliary_coordinate.py | 5 +- .../helpers/test_build_cell_measure.py | 5 +- .../helpers/test_build_cube_metadata.py | 5 +- .../test_build_dimension_coordinate.py | 5 +- ...t_build_geostationary_coordinate_system.py | 5 +- ..._azimuthal_equal_area_coordinate_system.py | 5 +- ...ild_lambert_conformal_coordinate_system.py | 5 +- .../test_build_mercator_coordinate_system.py | 5 +- ...uild_oblique_mercator_coordinate_system.py | 5 +- ...d_polar_stereographic_coordinate_system.py | 5 +- ...t_build_stereographic_coordinate_system.py | 5 +- ...d_transverse_mercator_coordinate_system.py | 5 +- .../test_build_verticalp_coordinate_system.py | 5 +- .../helpers/test_get_attr_units.py | 5 +- .../helpers/test_get_cf_bounds_var.py | 5 +- .../nc_load_rules/helpers/test_get_names.py | 5 +- .../test_has_supported_mercator_parameters.py | 5 +- ...upported_polar_stereographic_parameters.py | 5 +- .../helpers/test_parse_cell_methods.py | 5 +- .../helpers/test_reorder_bounds_data.py | 5 +- .../tests/unit/fileformats/netcdf/__init__.py | 5 +- .../fileformats/netcdf/loader/__init__.py | 5 +- .../netcdf/loader/test__get_cf_var_data.py | 5 +- .../netcdf/loader/test__load_aux_factory.py | 5 +- .../netcdf/loader/test__load_cube.py | 5 +- ...__translate_constraints_to_var_callback.py | 5 +- .../netcdf/loader/test_load_cubes.py | 5 +- .../unit/fileformats/netcdf/saver/__init__.py | 5 +- .../fileformats/netcdf/saver/test_Saver.py | 5 +- .../netcdf/saver/test_Saver__lazy.py | 5 +- 
.../saver/test_Saver__lazy_stream_data.py | 5 +- .../netcdf/saver/test_Saver__ugrid.py | 5 +- .../saver/test__data_fillvalue_check.py | 5 +- .../netcdf/saver/test__fillvalue_report.py | 5 +- .../fileformats/netcdf/saver/test_save.py | 5 +- .../fileformats/nimrod_load_rules/__init__.py | 5 +- .../nimrod_load_rules/test_units.py | 5 +- .../nimrod_load_rules/test_vertical_coord.py | 5 +- .../tests/unit/fileformats/pp/__init__.py | 5 +- .../unit/fileformats/pp/test_PPDataProxy.py | 5 +- .../tests/unit/fileformats/pp/test_PPField.py | 5 +- .../pp/test__convert_constraints.py | 5 +- .../fileformats/pp/test__create_field_data.py | 5 +- .../pp/test__data_bytes_to_shaped_array.py | 5 +- .../unit/fileformats/pp/test__field_gen.py | 5 +- .../fileformats/pp/test__interpret_field.py | 5 +- .../unit/fileformats/pp/test_as_fields.py | 5 +- .../tests/unit/fileformats/pp/test_load.py | 5 +- .../tests/unit/fileformats/pp/test_save.py | 5 +- .../unit/fileformats/pp/test_save_fields.py | 5 +- .../pp/test_save_pairs_from_cube.py | 5 +- .../fileformats/pp_load_rules/__init__.py | 5 +- .../pp_load_rules/test__all_other_rules.py | 5 +- ...__collapse_degenerate_points_and_bounds.py | 5 +- ...est__convert_scalar_pseudo_level_coords.py | 5 +- ...test__convert_scalar_realization_coords.py | 5 +- .../test__convert_time_coords.py | 5 +- .../test__convert_vertical_coords.py | 5 +- .../pp_load_rules/test__dim_or_aux.py | 5 +- .../pp_load_rules/test__epoch_date_hours.py | 5 +- .../pp_load_rules/test__model_level_number.py | 5 +- .../test__reduced_points_and_bounds.py | 5 +- .../test__reshape_vector_args.py | 5 +- .../fileformats/pp_load_rules/test_convert.py | 5 +- .../tests/unit/fileformats/rules/__init__.py | 5 +- .../unit/fileformats/rules/test_Loader.py | 5 +- .../unit/fileformats/rules/test__make_cube.py | 5 +- .../__init__.py | 5 +- .../test_ArrayStructure.py | 5 +- .../test_GroupStructure.py | 5 +- lib/iris/tests/unit/fileformats/test_rules.py | 5 +- 
.../tests/unit/fileformats/um/__init__.py | 5 +- .../unit/fileformats/um/fast_load/__init__.py | 5 +- .../um/fast_load/test_FieldCollation.py | 5 +- .../um/fast_load/test__convert_collation.py | 5 +- .../fast_load_structured_fields/__init__.py | 5 +- .../test_BasicFieldCollation.py | 5 +- .../test_group_structured_fields.py | 5 +- .../um/optimal_array_structuring/__init__.py | 5 +- .../test_optimal_array_structure.py | 5 +- .../unit/fileformats/um/test_um_to_pp.py | 5 +- lib/iris/tests/unit/io/__init__.py | 5 +- .../tests/unit/io/test__generate_cubes.py | 5 +- .../tests/unit/io/test_expand_filespecs.py | 5 +- lib/iris/tests/unit/io/test_run_callback.py | 5 +- lib/iris/tests/unit/io/test_save.py | 5 +- lib/iris/tests/unit/lazy_data/__init__.py | 5 +- .../unit/lazy_data/test_as_concrete_data.py | 5 +- .../tests/unit/lazy_data/test_as_lazy_data.py | 5 +- .../unit/lazy_data/test_co_realise_cubes.py | 5 +- .../tests/unit/lazy_data/test_is_lazy_data.py | 5 +- .../lazy_data/test_is_lazy_masked_data.py | 5 +- .../unit/lazy_data/test_lazy_elementwise.py | 5 +- .../lazy_data/test_map_complete_blocks.py | 5 +- .../lazy_data/test_multidim_lazy_stack.py | 5 +- .../tests/unit/lazy_data/test_non_lazy.py | 5 +- lib/iris/tests/unit/merge/__init__.py | 5 +- lib/iris/tests/unit/merge/test_ProtoCube.py | 5 +- lib/iris/tests/unit/pandas/__init__.py | 5 +- lib/iris/tests/unit/pandas/test_pandas.py | 5 +- lib/iris/tests/unit/plot/__init__.py | 5 +- lib/iris/tests/unit/plot/_blockplot_common.py | 5 +- .../test__check_bounds_contiguity_and_mask.py | 5 +- ..._check_geostationary_coords_and_convert.py | 5 +- lib/iris/tests/unit/plot/test__fixup_dates.py | 5 +- .../tests/unit/plot/test__get_plot_defn.py | 5 +- ...est__get_plot_defn_custom_coords_picked.py | 5 +- .../tests/unit/plot/test__get_plot_objects.py | 5 +- .../test__replace_axes_with_cartopy_axes.py | 5 +- lib/iris/tests/unit/plot/test_contour.py | 5 +- lib/iris/tests/unit/plot/test_contourf.py | 5 +- 
lib/iris/tests/unit/plot/test_hist.py | 5 +- lib/iris/tests/unit/plot/test_outline.py | 5 +- lib/iris/tests/unit/plot/test_pcolor.py | 5 +- lib/iris/tests/unit/plot/test_pcolormesh.py | 5 +- lib/iris/tests/unit/plot/test_plot.py | 5 +- lib/iris/tests/unit/plot/test_points.py | 5 +- lib/iris/tests/unit/plot/test_scatter.py | 5 +- lib/iris/tests/unit/quickplot/__init__.py | 5 +- lib/iris/tests/unit/quickplot/test_contour.py | 5 +- .../tests/unit/quickplot/test_contourf.py | 5 +- lib/iris/tests/unit/quickplot/test_outline.py | 5 +- lib/iris/tests/unit/quickplot/test_pcolor.py | 5 +- .../tests/unit/quickplot/test_pcolormesh.py | 5 +- lib/iris/tests/unit/quickplot/test_plot.py | 5 +- lib/iris/tests/unit/quickplot/test_points.py | 5 +- lib/iris/tests/unit/quickplot/test_scatter.py | 5 +- .../tests/unit/representation/__init__.py | 5 +- .../representation/cube_printout/__init__.py | 5 +- .../cube_printout/test_CubePrintout.py | 5 +- .../cube_printout/test_Table.py | 5 +- .../representation/cube_summary/__init__.py | 5 +- .../cube_summary/test_CubeSummary.py | 5 +- lib/iris/tests/unit/test_Future.py | 5 +- lib/iris/tests/unit/test_sample_data_path.py | 5 +- lib/iris/tests/unit/tests/__init__.py | 5 +- lib/iris/tests/unit/tests/stock/__init__.py | 5 +- .../tests/unit/tests/stock/test_netcdf.py | 5 +- lib/iris/tests/unit/tests/test_IrisTest.py | 5 +- lib/iris/tests/unit/time/__init__.py | 5 +- .../tests/unit/time/test_PartialDateTime.py | 5 +- lib/iris/tests/unit/util/__init__.py | 5 +- .../tests/unit/util/test__coord_regular.py | 5 +- lib/iris/tests/unit/util/test__is_circular.py | 5 +- lib/iris/tests/unit/util/test__mask_array.py | 5 +- .../unit/util/test__slice_data_with_keys.py | 5 +- lib/iris/tests/unit/util/test_array_equal.py | 5 +- .../unit/util/test_broadcast_to_shape.py | 5 +- .../unit/util/test_column_slices_generator.py | 5 +- .../test_demote_dim_coord_to_aux_coord.py | 5 +- .../tests/unit/util/test_describe_diff.py | 5 +- 
.../unit/util/test_equalise_attributes.py | 5 +- .../unit/util/test_file_is_newer_than.py | 5 +- .../unit/util/test_find_discontiguities.py | 5 +- lib/iris/tests/unit/util/test_mask_cube.py | 5 +- lib/iris/tests/unit/util/test_new_axis.py | 5 +- .../test_promote_aux_coord_to_dim_coord.py | 5 +- lib/iris/tests/unit/util/test_reverse.py | 5 +- .../tests/unit/util/test_rolling_window.py | 5 +- lib/iris/tests/unit/util/test_squeeze.py | 5 +- .../tests/unit/util/test_unify_time_units.py | 5 +- lib/iris/time.py | 5 +- lib/iris/util.py | 5 +- pyproject.toml | 6 +- tools/generate_std_names.py | 10 +- tools/release_do_nothing.py | 5 +- tools/update_lockfiles.py | 5 +- 608 files changed, 1248 insertions(+), 2663 deletions(-) delete mode 100644 COPYING delete mode 100644 COPYING.LESSER create mode 100644 LICENSE diff --git a/COPYING b/COPYING deleted file mode 100644 index 94a9ed024d..0000000000 --- a/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. 
If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. 
Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. 
- - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. 
Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. 
- - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. 
- - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. 
If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. 
- - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. 
- - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the 
material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. 
If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. 
- - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
diff --git a/COPYING.LESSER b/COPYING.LESSER deleted file mode 100644 index 65c5ca88a6..0000000000 --- a/COPYING.LESSER +++ /dev/null @@ -1,165 +0,0 @@ - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". - - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. 
Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. 
- - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. 
(If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. 
- - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..2d1d23e16c --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2010, Met Office. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/MANIFEST.in b/MANIFEST.in index 329cf79c5d..354b92d735 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -17,8 +17,7 @@ exclude .readthedocs.yml exclude CHANGES exclude CODE_OF_CONDUCT.md exclude codecov.yml -include COPYING -include COPYING.LESSER +include LICENSE exclude Makefile exclude noxfile.py diff --git a/README.md b/README.md index 53d24b0162..f857608718 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ | 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) [![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | | 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | | 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | -| ✨ Meta | [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/COPYING.LESSER) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | +| ✨ Meta | [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 
[![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/LICENSE) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | | 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | | 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) [![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) [![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | | | diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index 22a3110075..a60cb7f2b7 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` subclass that manages the Conda environment via custom user scripts. diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index c86682ca4a..61983a969f 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Common code for benchmarks.""" from os import environ import resource diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index 4cc4f6c70a..7d1e266c78 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ AuxFactory benchmark tests. diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index 3107dcf077..b6f56b997f 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Coord benchmark tests. 
diff --git a/benchmarks/benchmarks/cperf/__init__.py b/benchmarks/benchmarks/cperf/__init__.py index 814d29338f..7adba01b60 100644 --- a/benchmarks/benchmarks/cperf/__init__.py +++ b/benchmarks/benchmarks/cperf/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index 47eb255513..f27558a5ed 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py index 04bb7e1a61..efbd497e2e 100644 --- a/benchmarks/benchmarks/cperf/load.py +++ b/benchmarks/benchmarks/cperf/load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. 
""" diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index 2eb60e2ab5..957b28e3fd 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index 5889ce872b..ceacb4e86c 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Cube benchmark tests. diff --git a/benchmarks/benchmarks/experimental/__init__.py b/benchmarks/benchmarks/experimental/__init__.py index f16e400bce..81fb222916 100644 --- a/benchmarks/benchmarks/experimental/__init__.py +++ b/benchmarks/benchmarks/experimental/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Benchmark tests for the experimental module. 
diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index 2e40c525a6..1fa8b82d67 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Benchmark tests for the experimental.ugrid module. diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 5ecc90930b..6d62cf9cd5 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Benchmarks stages of operation of the function :func:`iris.experimental.ugrid.utils.recombine_submeshes`. diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py index 52a5aceca8..8837e7cca9 100644 --- a/benchmarks/benchmarks/generate_data/__init__.py +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Scripts for generating supporting data for benchmarking. diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index 954e791f43..b6702ad883 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Wrappers for using :mod:`iris.tests.stock` methods for benchmarking. diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py index 527b49a6bb..3be5c20a48 100644 --- a/benchmarks/benchmarks/generate_data/ugrid.py +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Scripts for generating supporting data for UGRID-related benchmarking. """ diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py index 39773bbb4b..23d3770aa1 100644 --- a/benchmarks/benchmarks/generate_data/um_files.py +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Generate FF, PP and NetCDF files based on a minimal synthetic FF file. diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index fc32ac289b..5f902fd2e0 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. from importlib import import_module, reload ################ diff --git a/benchmarks/benchmarks/iterate.py b/benchmarks/benchmarks/iterate.py index 0a5415ac2b..6cc935498c 100644 --- a/benchmarks/benchmarks/iterate.py +++ b/benchmarks/benchmarks/iterate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Iterate benchmark tests. diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index 3d15629f9e..a926e6b7e2 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" File loading benchmark tests. diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py index 35c8754171..ef01ae03be 100644 --- a/benchmarks/benchmarks/load/ugrid.py +++ b/benchmarks/benchmarks/load/ugrid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Mesh data loading benchmark tests. diff --git a/benchmarks/benchmarks/metadata_manager_factory.py b/benchmarks/benchmarks/metadata_manager_factory.py index 7e7fc98008..531af58b66 100644 --- a/benchmarks/benchmarks/metadata_manager_factory.py +++ b/benchmarks/benchmarks/metadata_manager_factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ metadata_manager_factory benchmark tests. diff --git a/benchmarks/benchmarks/mixin.py b/benchmarks/benchmarks/mixin.py index bec5518eee..335bee1a0f 100644 --- a/benchmarks/benchmarks/mixin.py +++ b/benchmarks/benchmarks/mixin.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Mixin benchmark tests. 
diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 75195c86e9..73a2a51990 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Plot benchmark tests. diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 44bd1b6c95..9cd77527af 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Regridding benchmark test diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py index e9a7918dcc..6feb446c70 100644 --- a/benchmarks/benchmarks/save.py +++ b/benchmarks/benchmarks/save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ File saving benchmarks. 
diff --git a/benchmarks/benchmarks/sperf/__init__.py b/benchmarks/benchmarks/sperf/__init__.py index eccad56f6f..111cd4b841 100644 --- a/benchmarks/benchmarks/sperf/__init__.py +++ b/benchmarks/benchmarks/sperf/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index e27b3b1996..da0cffde50 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index 85c73ab92b..bb3b707a75 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py index 6a60355976..ab1273e288 100644 --- a/benchmarks/benchmarks/sperf/load.py +++ b/benchmarks/benchmarks/sperf/load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/sperf/save.py b/benchmarks/benchmarks/sperf/save.py index dd33924c6c..9892f0d239 100644 --- a/benchmarks/benchmarks/sperf/save.py +++ b/benchmarks/benchmarks/sperf/save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py index 5c1d10d218..e4c3297614 100644 --- a/benchmarks/benchmarks/trajectory.py +++ b/benchmarks/benchmarks/trajectory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Trajectory benchmark test diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 4b8f6e1f18..1efe8d3acb 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Argparse conveniences for executing common types of benchmark runs. """ diff --git a/docs/gallery_tests/__init__.py b/docs/gallery_tests/__init__.py index ac5753e58b..091e997248 100644 --- a/docs/gallery_tests/__init__.py +++ b/docs/gallery_tests/__init__.py @@ -1,5 +1,4 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index a218b305a2..d3ca8309f8 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Pytest fixtures for the gallery tests.""" diff --git a/docs/gallery_tests/test_gallery_examples.py b/docs/gallery_tests/test_gallery_examples.py index 0d0793a7da..93f361a62a 100644 --- a/docs/gallery_tests/test_gallery_examples.py +++ b/docs/gallery_tests/test_gallery_examples.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import importlib diff --git a/docs/src/conf.py b/docs/src/conf.py index 8be34c1989..e349000862 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # -*- coding: utf-8 -*- # diff --git a/docs/src/copyright.rst b/docs/src/copyright.rst index 2829374f47..d5996fd999 100644 --- a/docs/src/copyright.rst +++ b/docs/src/copyright.rst @@ -6,18 +6,15 @@ Iris Code --------- All Iris source code, unless explicitly stated, is ``Copyright Iris -contributors`` and is licensed under the **GNU Lesser General Public -License** as published by the Free Software Foundation, either version 3 of -the License, or (at your option) any later version. +contributors`` and is licensed under the **BSD-3 License**. You should find all source files with the following header: .. admonition:: Code License Copyright Iris contributors - This file is part of Iris and is released under the LGPL license. - See COPYING and COPYING.LESSER in the root of the repository for full - licensing details. 
+ This file is part of Iris and is released under the BSD license. + See LICENSE in the root of the repository for full licensing details. Iris Documentation and Examples diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index 2cbad525d3..a69247008a 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:456: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:823: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:822: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:456: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) :: @@ -125,16 +125,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=456) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=455) ... my_operation() ... - iris/coord_systems.py:823: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:822: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: - python -W ignore:::iris.coord_systems:454 - export PYTHONWARNINGS=ignore:::iris.coord_systems:454 + python -W ignore:::iris.coord_systems:453 + export PYTHONWARNINGS=ignore:::iris.coord_systems:453 Warnings from a Common Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -188,7 +188,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:456: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) ---- diff --git a/docs/src/sphinxext/api_rst_formatting.py b/docs/src/sphinxext/api_rst_formatting.py index 8f1aa3c5f3..6dd82de91e 100644 --- a/docs/src/sphinxext/api_rst_formatting.py +++ b/docs/src/sphinxext/api_rst_formatting.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # This script will process all .rst files that have been created by # sphinxcontrib.apidoc extension and perform minor changes, specifically: diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 33a4a211d9..6e7087c687 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -24,7 +24,7 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. N/A +#. `@lbdreyer`_ relicensed Iris from LGPL-3 to BSD-3. (:pull: `5577`) ✨ Features diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 2a3bd8a753..c29998cd6d 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A package for handling multi-dimensional data and associated metadata. diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 837afd73f3..554f14d914 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Automatic concatenation of multiple cubes over one or more existing dimensions. 
diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 1884cbcbd9..82225ec516 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides objects for building up expressions useful for pattern matching. diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py index 486a58de45..9ea4481307 100644 --- a/lib/iris/_data_manager.py +++ b/lib/iris/_data_manager.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Management of common state and behaviour for cube and coordinate data. diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index 027e11f2dc..ad4dc5a560 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Utilities for producing runtime deprecation messages. 
diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 4c294a7d2f..fb29f411d3 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Routines for lazy data handling. diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 0f748d6d34..bf22f57887 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Automatic collation of cubes into higher-dimensional cubes. diff --git a/lib/iris/_representation/__init__.py b/lib/iris/_representation/__init__.py index f6c7fdf9b4..aec46ec927 100644 --- a/lib/iris/_representation/__init__.py +++ b/lib/iris/_representation/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Code to make printouts and other representations (e.g. html) of Iris objects. 
diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index ea32fc5126..9239c96949 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides text printouts of Iris cubes. diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 4e0fcfb1ea..1094588fa6 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides objects describing cube summaries. """ diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index f00c3dd850..76dd52de6e 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A package providing :class:`iris.cube.Cube` analysis support. 
diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index edead3948a..ffec82fd4e 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import functools import cf_units diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 4cb449ae51..86a0c38086 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Code to implement vector rotation by angles, and inferring gridcell angles from coordinate points and bounds. diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 34dcae3026..091d29d7e2 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""A collection of helpers for interpolation.""" from collections import namedtuple diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 65679cd968..113c21e6e3 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import copy import functools diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 44b1adc580..f312aa02a0 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Calculus operations on :class:`iris.cube.Cube` instances. diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 0fae5bc499..a760f5ab50 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Various utilities and numeric transformations relevant to cartography. 
diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index 9898f4e974..d7ed7f8840 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Various utilities related to geometric operations. diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 5e180c6ee2..a24203ba2a 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Basic mathematical and statistical operations. diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 711e3c5bfb..121d862adb 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Statistical operations between cubes. 
diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 2495ff12fc..99c8add123 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory. diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 323c89e3fb..61855f1188 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Definitions of derived coordinates. diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py index d8e8ba80ef..8526c549c3 100644 --- a/lib/iris/common/__init__.py +++ b/lib/iris/common/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A package for provisioning common Iris infrastructure. 
diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index 7c530087af..43dc09d5db 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides the infrastructure to support lenient client/service behaviour. diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 7def79f51e..8d60171331 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides the infrastructure to support the common metadata API. diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 4c19dd756b..f3b42fc02d 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides common metadata mixin behaviour. 
diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 8d5d57d4a4..83ca630353 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` diff --git a/lib/iris/config.py b/lib/iris/config.py index 03d3d363a6..c1d1de5793 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides access to Iris-specific configuration values. diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index b6cc79f253..87103bf6f1 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Cube functions for coordinate categorisation. 
diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 3d986fefce..e62f3fbf0e 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Definitions of coordinate systems. diff --git a/lib/iris/coords.py b/lib/iris/coords.py index d5ee2667d8..30de08d496 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Definitions of coordinates and other dimensional metadata. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 60fdbc9c94..3a36a035c0 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Classes for representing multi-dimensional data with metadata. diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 919917a01d..c3e6b6193f 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Exceptions specific to the Iris package. diff --git a/lib/iris/experimental/__init__.py b/lib/iris/experimental/__init__.py index 40ba7fdb66..4c7c62b4f5 100644 --- a/lib/iris/experimental/__init__.py +++ b/lib/iris/experimental/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Experimental code can be introduced to Iris through this package. diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index 1b6c2d46be..ba4e9e6050 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Wrapper for animating iris cubes using iris or matplotlib plotting functions diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 7c95293469..6fe12ea82a 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index d5fa7c6f72..f35a483b01 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Regridding functions. diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index fdc23c7bc4..83e65f89af 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Support for conservative regridding via ESMPy. diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 116b340592..785bf43e63 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Definitions of how Iris objects should be represented. diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index a7cfbf6876..604fda38a3 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Routines for putting data on new strata (aka. isosurfaces), often in the Z direction. diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py index a3603a5355..30a934dfba 100644 --- a/lib/iris/experimental/ugrid/__init__.py +++ b/lib/iris/experimental/ugrid/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Infra-structure for unstructured mesh support, based on diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index 42c1cfd0a3..ba365aeb1f 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Extensions to Iris' CF variable representation to represent CF UGrid variables. diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 67d1491930..c2a4b0c563 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Extensions to Iris' NetCDF loading to allow the construction of diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index af557c345c..68d208d867 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Iris' data model representation of CF UGrid's Mesh and its constituent parts. diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index 44bbe04fe9..bfdcc7e114 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" The common metadata API classes for :mod:`iris.experimental.ugrid.mesh`. diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index 3c42137905..f09740d98c 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Extensions to Iris' NetCDF saving to allow diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index a13a43d3fd..05e60c3ce7 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Utility operations specific to unstructured data. diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index 86b304b82c..b74b420802 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A package for converting cubes to and from specific file formats. 
diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 5121b47976..76df5d5718 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides UK Met Office Fields File (FF) format specific capabilities. diff --git a/lib/iris/fileformats/_ff_cross_references.py b/lib/iris/fileformats/_ff_cross_references.py index ca41f5257f..b060ed42e9 100644 --- a/lib/iris/fileformats/_ff_cross_references.py +++ b/lib/iris/fileformats/_ff_cross_references.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Table providing UM grid-code, field-code and pseudolevel-type for (some) stash codes. Used in UM file i/o. diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index b102a082df..ca2f341249 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Support for cube-specific CF-to-Iris translation operations. 
diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 44ef7ac549..7db15d21ac 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Replacement code for the Pyke rules. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 497c2a12c9..ec7a28777b 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 9c75c0e866..71e59feda0 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" All the pure-Python 'helper' functions which were previously included in the Pyke rules database 'fc_rules_cf.krb'. diff --git a/lib/iris/fileformats/_pp_lbproc_pairs.py b/lib/iris/fileformats/_pp_lbproc_pairs.py index 7ad6f21848..86a5f9381d 100644 --- a/lib/iris/fileformats/_pp_lbproc_pairs.py +++ b/lib/iris/fileformats/_pp_lbproc_pairs.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import itertools diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index 11c62983e3..031a5e7483 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. r""" The purpose of this module is to provide utilities for the identification of multi-dimensional structure in a flat sequence of homogeneous objects. diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 4dcd5ce6aa..26a1f307b2 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Provides ABF (and ABL) file format capabilities. diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index f412955adb..86960003db 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 50c02e4d04..e3a4493fe8 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index 9a779cc92d..16f71a940f 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Provides NAME file format loading capabilities.""" diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index cb8867b6ea..7cc7c61d81 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """NAME file format loading functions.""" import collections diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index b696b200ff..99817c5921 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Module to support the loading and saving of NetCDF files, also using the CF conventions for metadata interpretation. diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py index 15ac117a8b..b7727a1ab7 100644 --- a/lib/iris/fileformats/netcdf/_dask_locks.py +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Module containing code to create locks enabling dask workers to co-operate. diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 21c697acab..b5226b8e42 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Module to ensure all calls to the netCDF4 library are thread-safe. diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 29202af89e..030427a2b9 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Module to support the loading of Iris cubes from NetCDF files, also using the CF conventions for metadata interpretation. diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 011f74892d..6409d8c311 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index 6f39ca87fa..d4e86502bd 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Provides NIMROD file format capabilities.""" import glob diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 17db0644ee..7347135422 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Rules for converting NIMROD fields into cubes.""" import re diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index e19ba3adff..4b2b7eeae0 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Provides UK Met Office Post Process (PP) format specific capabilities. diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 11d03e978a..1aed25311d 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # Historically this was auto-generated from diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 998255ff2b..7db21d5f99 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import warnings diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index d5a4b9c823..bcfd4f8323 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Generalised mechanisms for metadata translation and cube construction. 
diff --git a/lib/iris/fileformats/um/__init__.py b/lib/iris/fileformats/um/__init__.py index c01e8301e2..ac38e45de5 100644 --- a/lib/iris/fileformats/um/__init__.py +++ b/lib/iris/fileformats/um/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides iris loading support for UM Fieldsfile-like file types, and PP. diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index e29025c169..ce9d183586 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Support for "fast" loading of structured UM files in iris load functions, i.e. :meth:`iris.load` and its associates. diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 64b7f8e891..2a41cf99ba 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Code for fast loading of structured UM data. diff --git a/lib/iris/fileformats/um/_ff_replacement.py b/lib/iris/fileformats/um/_ff_replacement.py index 0a661081c7..33ab2fbb68 100644 --- a/lib/iris/fileformats/um/_ff_replacement.py +++ b/lib/iris/fileformats/um/_ff_replacement.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Support for UM "fieldsfile-like" files. diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index 2793d47187..3fd892808b 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """A module to provide an optimal array structure calculation.""" diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index b93b192bbd..d2e51a3257 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides UM/CF phenomenon translations. 
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 4e5004ff10..c8e02a40cf 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides an interface to manage URI scheme support in iris. diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index 9def0ada98..d2d3b5fd41 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A module to provide convenient file format identification through a combination of filename extension and file based *magic* numbers. diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index cc82433e85..cd950828be 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Cube functions for iteration in step. 
diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 3ba17ffc97..f640cf5687 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Load, configure and register color map palettes and initialise color map meta-data mappings. diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 0d0e65d648..535bed3a64 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provide conversion to and from Pandas data structures. diff --git a/lib/iris/plot.py b/lib/iris/plot.py index b32b45195a..977cbbcfc2 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 12b5e80697..15f4cf11e2 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ High-level plotting extensions to :mod:`iris.plot`. diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index 3d00d3bb3b..7bbbca83a9 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Contains symbol definitions for use with :func:`iris.plot.symbols`. diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 5529b899c5..83fdb6af89 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Provides testing capabilities and customisations specific to Iris. diff --git a/lib/iris/tests/experimental/__init__.py b/lib/iris/tests/experimental/__init__.py index fa2390c45b..d31931720c 100644 --- a/lib/iris/tests/experimental/__init__.py +++ b/lib/iris/tests/experimental/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Experimental code is tested in this package. diff --git a/lib/iris/tests/experimental/regrid/__init__.py b/lib/iris/tests/experimental/regrid/__init__.py index 653505e3d5..6837b12e91 100644 --- a/lib/iris/tests/experimental/regrid/__init__.py +++ b/lib/iris/tests/experimental/regrid/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Regridding code is tested in this package. diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 07961a319a..93b1a6d3e6 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test area weighted regridding. 
diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py index 467c040eb3..2c7bad59ff 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`. diff --git a/lib/iris/tests/experimental/test_raster.py b/lib/iris/tests/experimental/test_raster.py index ffd03e6f4d..736263f196 100644 --- a/lib/iris/tests/experimental/test_raster.py +++ b/lib/iris/tests/experimental/test_raster.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import iris.tests as tests # isort:skip import PIL.Image diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 5ee555cb6e..3c440264f9 100755 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# !/usr/bin/env python """ Contains Iris graphic testing utilities diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 4af7f4726d..1c29d4e551 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # !/usr/bin/env python """ Provides "diff-like" comparison of images. diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index 02ddaad2cb..cd4c83e9b1 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # !/usr/bin/env python """ Updates imagerepo.json based on the baseline images diff --git a/lib/iris/tests/integration/__init__.py b/lib/iris/tests/integration/__init__.py index 71b911cbb0..29a99030dd 100644 --- a/lib/iris/tests/integration/__init__.py +++ b/lib/iris/tests/integration/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for the :mod:`iris` package.""" diff --git a/lib/iris/tests/integration/analysis/__init__.py b/lib/iris/tests/integration/analysis/__init__.py index 20b6250b70..4ec86f2d5a 100644 --- a/lib/iris/tests/integration/analysis/__init__.py +++ b/lib/iris/tests/integration/analysis/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for the :mod:`iris.analysis` package.""" diff --git a/lib/iris/tests/integration/analysis/test_area_weighted.py b/lib/iris/tests/integration/analysis/test_area_weighted.py index d01da79a56..49c80d7bba 100644 --- a/lib/iris/tests/integration/analysis/test_area_weighted.py +++ b/lib/iris/tests/integration/analysis/test_area_weighted.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for area weighted regridding.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/aux_factory/__init__.py b/lib/iris/tests/integration/aux_factory/__init__.py index 58ba6fb82b..3ee14d5add 100644 --- a/lib/iris/tests/integration/aux_factory/__init__.py +++ b/lib/iris/tests/integration/aux_factory/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for the :mod:`iris.aux_factory` package.""" diff --git a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py index 4b2464b272..15f65d52ad 100644 --- a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integratation tests for the `iris.aux_factory.OceanSigmaZFactory` class. diff --git a/lib/iris/tests/integration/concatenate/__init__.py b/lib/iris/tests/integration/concatenate/__init__.py index fb136098ee..d15b201abe 100644 --- a/lib/iris/tests/integration/concatenate/__init__.py +++ b/lib/iris/tests/integration/concatenate/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for the :mod:`iris._concatenate` package.""" diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index 2543e2931b..9bd6bcb0c5 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests for concatenating cubes with differing time coord epochs using :func:`iris.util.unify_time_units`. diff --git a/lib/iris/tests/integration/experimental/__init__.py b/lib/iris/tests/integration/experimental/__init__.py index 269cf3dd9a..79722df7a3 100644 --- a/lib/iris/tests/integration/experimental/__init__.py +++ b/lib/iris/tests/integration/experimental/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for the :mod:`iris.experimental` package.""" diff --git a/lib/iris/tests/integration/experimental/test_CubeRepresentation.py b/lib/iris/tests/integration/experimental/test_CubeRepresentation.py index 48a3e51b52..0c1386d59f 100644 --- a/lib/iris/tests/integration/experimental/test_CubeRepresentation.py +++ b/lib/iris/tests/integration/experimental/test_CubeRepresentation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for cube html representation.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py index 742adc8c15..4ae48fe6f9 100644 --- a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py +++ b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for experimental regridding.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index d94e85d2f5..1bd39695ec 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests for NetCDF-UGRID file loading. 
diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py b/lib/iris/tests/integration/experimental/test_ugrid_save.py index 803ac71caa..710ed6941d 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests for NetCDF-UGRID file saving. diff --git a/lib/iris/tests/integration/fast_load/__init__.py b/lib/iris/tests/integration/fast_load/__init__.py index a94785ca58..40fc56f129 100644 --- a/lib/iris/tests/integration/fast_load/__init__.py +++ b/lib/iris/tests/integration/fast_load/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for :mod:`iris.fileformats.um` fast load functions.""" diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index 318292615b..a37f1eef07 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Integration tests for fast-loading FF and PP files.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/integration/merge/__init__.py b/lib/iris/tests/integration/merge/__init__.py index 9374976532..ae16d4fe45 100644 --- a/lib/iris/tests/integration/merge/__init__.py +++ b/lib/iris/tests/integration/merge/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for the :mod:`iris._merge` package.""" diff --git a/lib/iris/tests/integration/merge/test_merge.py b/lib/iris/tests/integration/merge/test_merge.py index f5f92a7a7d..87b52fd85d 100644 --- a/lib/iris/tests/integration/merge/test_merge.py +++ b/lib/iris/tests/integration/merge/test_merge.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests for merging cubes. diff --git a/lib/iris/tests/integration/netcdf/__init__.py b/lib/iris/tests/integration/netcdf/__init__.py index f500b52520..bd62b4d988 100644 --- a/lib/iris/tests/integration/netcdf/__init__.py +++ b/lib/iris/tests/integration/netcdf/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for loading and saving netcdf files.""" diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py index c41af1b356..70891bc40c 100644 --- a/lib/iris/tests/integration/netcdf/test__dask_locks.py +++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` package. diff --git a/lib/iris/tests/integration/netcdf/test_attributes.py b/lib/iris/tests/integration/netcdf/test_attributes.py index a73d6c7d49..aab91bcb31 100644 --- a/lib/iris/tests/integration/netcdf/test_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_attributes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for attribute-related loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_aux_factories.py b/lib/iris/tests/integration/netcdf/test_aux_factories.py index d89f275336..6b3dde6fd1 100644 --- a/lib/iris/tests/integration/netcdf/test_aux_factories.py +++ b/lib/iris/tests/integration/netcdf/test_aux_factories.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for aux-factory-related loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index 3175664b4c..b7b21911e5 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for coord-system-related loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index 09f6235aab..d3f2ce22c4 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests for delayed saving. """ diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index 6214f09e7e..0fc619e4cb 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py index 554fabb4fc..4e5da18bbd 100644 --- a/lib/iris/tests/integration/netcdf/test_self_referencing.py +++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for iris#3367 - loading a self-referencing NetCDF file.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py index c5779250a2..916cbf67e1 100644 --- a/lib/iris/tests/integration/netcdf/test_thread_safety.py +++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests covering thread safety during loading/saving netcdf files. diff --git a/lib/iris/tests/integration/plot/__init__.py b/lib/iris/tests/integration/plot/__init__.py index aafa488e2d..c67303c0f3 100644 --- a/lib/iris/tests/integration/plot/__init__.py +++ b/lib/iris/tests/integration/plot/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for the :mod:`iris.plot` package.""" diff --git a/lib/iris/tests/integration/plot/test_animate.py b/lib/iris/tests/integration/plot/test_animate.py index ef19dbb108..1354ef4289 100644 --- a/lib/iris/tests/integration/plot/test_animate.py +++ b/lib/iris/tests/integration/plot/test_animate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Integration tests for :func:`iris.plot.animate`. diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index a306e6c82f..c742564c7d 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test interaction between :mod:`iris.plot` and :func:`matplotlib.pyplot.colorbar` diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index d438c09bd5..2b3a59d093 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test plot of time coord with non-standard calendar. diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py index 0051549794..2c9360e9ea 100644 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ b/lib/iris/tests/integration/plot/test_nzdateline.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test set up of limited area map extents which bridge the date line. diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py index 1b95899803..673f8817d6 100644 --- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py +++ b/lib/iris/tests/integration/plot/test_plot_2d_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test plots with two dimensional coordinates. 
diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 37f506bd17..652a205fd8 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test some key usages of :func:`iris.plot.quiver`. diff --git a/lib/iris/tests/integration/test_Datums.py b/lib/iris/tests/integration/test_Datums.py index 43287c7040..8325ad901a 100755 --- a/lib/iris/tests/integration/test_Datums.py +++ b/lib/iris/tests/integration/test_Datums.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for :class:`iris.coord_systems` datum support.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_PartialDateTime.py b/lib/iris/tests/integration/test_PartialDateTime.py index 563af1035c..ed995beda7 100644 --- a/lib/iris/tests/integration/test_PartialDateTime.py +++ b/lib/iris/tests/integration/test_PartialDateTime.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Integration tests for :class:`iris.time.PartialDateTime`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_climatology.py b/lib/iris/tests/integration/test_climatology.py index 54d43858fb..f15428eb9d 100644 --- a/lib/iris/tests/integration/test_climatology.py +++ b/lib/iris/tests/integration/test_climatology.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_cube.py b/lib/iris/tests/integration/test_cube.py index ad6666d28e..8f3ac5fb48 100644 --- a/lib/iris/tests/integration/test_cube.py +++ b/lib/iris/tests/integration/test_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for :class:`iris.cube.Cube`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_ff.py b/lib/iris/tests/integration/test_ff.py index 0b0ccf4c5c..b613da385f 100644 --- a/lib/iris/tests/integration/test_ff.py +++ b/lib/iris/tests/integration/test_ff.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for loading LBC fieldsfiles.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_new_axis.py b/lib/iris/tests/integration/test_new_axis.py index 876eccbb63..7c8da13ae5 100644 --- a/lib/iris/tests/integration/test_new_axis.py +++ b/lib/iris/tests/integration/test_new_axis.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for :func:`iris.util.new_axis`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_pickle.py b/lib/iris/tests/integration/test_pickle.py index fa5ddbd73e..7317855512 100644 --- a/lib/iris/tests/integration/test_pickle.py +++ b/lib/iris/tests/integration/test_pickle.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for pickling things.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index 026bdae58a..bab925bd7e 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for loading and saving PP files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_pp_constrained_load_cubes.py b/lib/iris/tests/integration/test_pp_constrained_load_cubes.py index 7ddf39b2ff..5ba8978ed1 100644 --- a/lib/iris/tests/integration/test_pp_constrained_load_cubes.py +++ b/lib/iris/tests/integration/test_pp_constrained_load_cubes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for :func:`iris.fileformats.rules.load_cubes`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_regrid_equivalence.py b/lib/iris/tests/integration/test_regrid_equivalence.py index 09b47072e0..6bcb1ce403 100644 --- a/lib/iris/tests/integration/test_regrid_equivalence.py +++ b/lib/iris/tests/integration/test_regrid_equivalence.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Tests to check the validity of replacing "iris.analysis._interpolate.regrid`('nearest')" with diff --git a/lib/iris/tests/integration/test_regridding.py b/lib/iris/tests/integration/test_regridding.py index 3e87a8d0aa..44e9fef22e 100644 --- a/lib/iris/tests/integration/test_regridding.py +++ b/lib/iris/tests/integration/test_regridding.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for regridding.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_subset.py b/lib/iris/tests/integration/test_subset.py index bc2029afba..457616cee3 100644 --- a/lib/iris/tests/integration/test_subset.py +++ b/lib/iris/tests/integration/test_subset.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for subset.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py index a8e3acaa41..abe8fd0a2e 100644 --- a/lib/iris/tests/integration/test_trajectory.py +++ b/lib/iris/tests/integration/test_trajectory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Integration tests for :mod:`iris.analysis.trajectory`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/um/__init__.py b/lib/iris/tests/integration/um/__init__.py index a94785ca58..40fc56f129 100644 --- a/lib/iris/tests/integration/um/__init__.py +++ b/lib/iris/tests/integration/um/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for :mod:`iris.fileformats.um` fast load functions.""" diff --git a/lib/iris/tests/integration/um/test_fieldsfile.py b/lib/iris/tests/integration/um/test_fieldsfile.py index 56b88c2b6d..2aff7a2989 100644 --- a/lib/iris/tests/integration/um/test_fieldsfile.py +++ b/lib/iris/tests/integration/um/test_fieldsfile.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the fast loading of structured Fieldsfiles. diff --git a/lib/iris/tests/pp.py b/lib/iris/tests/pp.py index d8eb3256c4..3e07ccbd7f 100644 --- a/lib/iris/tests/pp.py +++ b/lib/iris/tests/pp.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import contextlib import os.path diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index 632dc95e20..c66c13bba5 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A collection of routines which create standard Cubes/files for test purposes. 
diff --git a/lib/iris/tests/stock/_stock_2d_latlons.py b/lib/iris/tests/stock/_stock_2d_latlons.py index 4733a15305..889f8bce12 100644 --- a/lib/iris/tests/stock/_stock_2d_latlons.py +++ b/lib/iris/tests/stock/_stock_2d_latlons.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Extra stock routines for making and manipulating cubes with 2d coordinates, to mimic ocean grid data. diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py index da226a3790..7726849252 100644 --- a/lib/iris/tests/stock/mesh.py +++ b/lib/iris/tests/stock/mesh.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Helper functions making objects for unstructured mesh testing.""" diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index bf93f01f6b..0f6a08b596 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Routines for generating synthetic NetCDF files from template headers.""" from pathlib import Path diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 745163b485..440b544f94 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ diff --git a/lib/iris/tests/test_abf.py b/lib/iris/tests/test_abf.py index 0b398879fc..92ed337710 100644 --- a/lib/iris/tests/test_abf.py +++ b/lib/iris/tests/test_abf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index e5614f6b63..e34d2ff1bd 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index 0d88a23055..f611e25c4e 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 86cc79092b..36e008f38e 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index 4d92b9a92c..c0329b72d6 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_cartography.py b/lib/iris/tests/test_cartography.py index c9647dc48e..58dccb78aa 100644 --- a/lib/iris/tests/test_cartography.py +++ b/lib/iris/tests/test_cartography.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Tests elements of the cartography module. diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index 0abb35c566..c748b9dfd4 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test cube indexing, slicing, and extracting, and also the dot graphs. diff --git a/lib/iris/tests/test_cell.py b/lib/iris/tests/test_cell.py index 21d2603072..3925d9b0a7 100644 --- a/lib/iris/tests/test_cell.py +++ b/lib/iris/tests/test_cell.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index 3abd6b981b..70f24478d2 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the cf module. diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 54309e3906..e3a1d2eaf3 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris.tests first so that some things can be initialised before # importing anything else @@ -23,9 +22,8 @@ LICENSE_TEMPLATE = """# Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details.""" +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details.""" # Guess iris repo directory of Iris - realpath is used to mitigate against # Python finding the iris package via a symlink. 
diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index ec92838466..7d28d48c31 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the cube concatenate mechanism. diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index e568105f91..b034525ff2 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the constrained cube loading mechanism. diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index ea99ae06df..5eb12ba1f3 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 2e5aef249c..e62a94f080 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_cube.py b/lib/iris/tests/test_cube.py index c9b76539d2..d13db758a5 100644 --- a/lib/iris/tests/test_cube.py +++ b/lib/iris/tests/test_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py index a6fc5e3149..1a6be27f9c 100644 --- a/lib/iris/tests/test_cube_to_pp.py +++ b/lib/iris/tests/test_cube_to_pp.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_ff.py b/lib/iris/tests/test_ff.py index 95f3b1493b..1abfafdac1 100644 --- a/lib/iris/tests/test_ff.py +++ b/lib/iris/tests/test_ff.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the Fieldsfile file loading plugin and FFHeader. diff --git a/lib/iris/tests/test_file_load.py b/lib/iris/tests/test_file_load.py index 0fe69ff583..d0b9b2461a 100644 --- a/lib/iris/tests/test_file_load.py +++ b/lib/iris/tests/test_file_load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the file loading mechanism. diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index 216637202a..dc901db715 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the file saving mechanism. 
diff --git a/lib/iris/tests/test_hybrid.py b/lib/iris/tests/test_hybrid.py index b070f36a7a..e3e5076650 100644 --- a/lib/iris/tests/test_hybrid.py +++ b/lib/iris/tests/test_hybrid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the hybrid vertical coordinate representations. diff --git a/lib/iris/tests/test_image_json.py b/lib/iris/tests/test_image_json.py index b5213156f8..75e40822dc 100644 --- a/lib/iris/tests/test_image_json.py +++ b/lib/iris/tests/test_image_json.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_imports.py b/lib/iris/tests/test_imports.py index ca0d262ec4..46c7cae723 100644 --- a/lib/iris/tests/test_imports.py +++ b/lib/iris/tests/test_imports.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_intersect.py b/lib/iris/tests/test_intersect.py index 01e9f79af5..29603f61a8 100644 --- a/lib/iris/tests/test_intersect.py +++ b/lib/iris/tests/test_intersect.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the intersection of Coords diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index 82da82cfa9..852944eee5 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the io/__init__.py module. diff --git a/lib/iris/tests/test_iterate.py b/lib/iris/tests/test_iterate.py index 6317ef32b5..1bee6db74f 100644 --- a/lib/iris/tests/test_iterate.py +++ b/lib/iris/tests/test_iterate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the iteration of cubes in step. 
diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py index 57b748e52f..690198c25a 100644 --- a/lib/iris/tests/test_lazy_aggregate_by.py +++ b/lib/iris/tests/test_lazy_aggregate_by.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. import unittest from iris._lazy_data import as_lazy_data diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index adb33924e5..1189f74b55 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the main loading API. diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 202c319b61..6ea4571630 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Tests map creation. 
diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index e53bbfb5f3..054fd3a20b 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the cube merging mechanism. diff --git a/lib/iris/tests/test_name.py b/lib/iris/tests/test_name.py index b4e91bafd7..51bc92c28c 100644 --- a/lib/iris/tests/test_name.py +++ b/lib/iris/tests/test_name.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Tests for NAME loading.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 2e389942bf..793f8df876 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test CF-NetCDF file loading and saving. 
diff --git a/lib/iris/tests/test_nimrod.py b/lib/iris/tests/test_nimrod.py index 6d62623198..ed60a516c8 100644 --- a/lib/iris/tests/test_nimrod.py +++ b/lib/iris/tests/test_nimrod.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_peak.py b/lib/iris/tests/test_peak.py index a2b6894149..1d9dd68cc1 100644 --- a/lib/iris/tests/test_peak.py +++ b/lib/iris/tests/test_peak.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/test_pickling.py b/lib/iris/tests/test_pickling.py index 26247e795b..342b07cb03 100644 --- a/lib/iris/tests/test_pickling.py +++ b/lib/iris/tests/test_pickling.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test pickling of Iris objects. 
diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 55c912f423..150d521e34 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py index 49bedaf1e2..44650919e1 100644 --- a/lib/iris/tests/test_pp_cf.py +++ b/lib/iris/tests/test_pp_cf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index ca7f1c50eb..b8606e3120 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
# import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_pp_stash.py b/lib/iris/tests/test_pp_stash.py index 42390ab2b3..733d1697de 100644 --- a/lib/iris/tests/test_pp_stash.py +++ b/lib/iris/tests/test_pp_stash.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py index eb006fb88e..d9c47c7841 100644 --- a/lib/iris/tests/test_pp_to_cube.py +++ b/lib/iris/tests/test_pp_to_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index df2db12de6..c42a8989fb 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Tests the high-level plotting interface. diff --git a/lib/iris/tests/test_std_names.py b/lib/iris/tests/test_std_names.py index 48d32acbee..bc50903ab6 100644 --- a/lib/iris/tests/test_std_names.py +++ b/lib/iris/tests/test_std_names.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_uri_callback.py b/lib/iris/tests/test_uri_callback.py index 67831945c5..62ae1b7fc8 100644 --- a/lib/iris/tests/test_uri_callback.py +++ b/lib/iris/tests/test_uri_callback.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index d8d5d73e95..cf1dc44755 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Test iris.util diff --git a/lib/iris/tests/unit/__init__.py b/lib/iris/tests/unit/__init__.py index 50929c8020..c03d437279 100644 --- a/lib/iris/tests/unit/__init__.py +++ b/lib/iris/tests/unit/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris` package.""" diff --git a/lib/iris/tests/unit/analysis/__init__.py b/lib/iris/tests/unit/analysis/__init__.py index 974b4e3584..4f957bd501 100644 --- a/lib/iris/tests/unit/analysis/__init__.py +++ b/lib/iris/tests/unit/analysis/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.analysis` package.""" diff --git a/lib/iris/tests/unit/analysis/area_weighted/__init__.py b/lib/iris/tests/unit/analysis/area_weighted/__init__.py index 464036a6dd..2cccaec14c 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/__init__.py +++ b/lib/iris/tests/unit/analysis/area_weighted/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.analysis._area_weighted` module.""" diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index ecaa028ab3..2d873ad011 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :class:`iris.analysis._area_weighted.AreaWeightedRegridder`. diff --git a/lib/iris/tests/unit/analysis/cartography/__init__.py b/lib/iris/tests/unit/analysis/cartography/__init__.py index 625a6fa141..ff3db13198 100644 --- a/lib/iris/tests/unit/analysis/cartography/__init__.py +++ b/lib/iris/tests/unit/analysis/cartography/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.analysis.cartography` module.""" diff --git a/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py b/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py index 612e5d8ecf..23c7097902 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py +++ b/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.analysis.cartography._get_lon_lat_coords""" import pytest diff --git a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py index a44661292f..e5f6964e22 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py +++ b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.analysis.cartography._quadrant_area` function""" diff --git a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py index 009c97fc34..eeafc533e4 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py +++ b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.analysis.cartography._xy_range`""" diff --git a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py index 696841ddd6..29c906f0d1 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py +++ b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.analysis.cartography.area_weights` function""" diff --git a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py index 810851362e..f3f8c81583 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py +++ b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.analysis.cartography.gridcell_angles`. diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index c00830aacc..7b52f4492e 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.analysis.cartography.project`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py index f5c882a983..389dfaeb3a 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.analysis.cartography.rotate_grid_vectors`. 
diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index 212a39bf2d..af1a2b8b42 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.analysis.cartography.rotate_winds`. diff --git a/lib/iris/tests/unit/analysis/geometry/__init__.py b/lib/iris/tests/unit/analysis/geometry/__init__.py index c57f5e246a..25fa6af6cb 100644 --- a/lib/iris/tests/unit/analysis/geometry/__init__.py +++ b/lib/iris/tests/unit/analysis/geometry/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.analysis.geometry` module.""" diff --git a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py index 2509ac1a92..bec45d8b17 100644 --- a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py +++ b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.analysis.geometry._extract_relevant_cube_slice`. diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index 62ab1ae283..ae0e47292d 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.analysis.geometry.geometry_area_weights` function. diff --git a/lib/iris/tests/unit/analysis/interpolation/__init__.py b/lib/iris/tests/unit/analysis/interpolation/__init__.py index 3825dacda3..01208c1aba 100644 --- a/lib/iris/tests/unit/analysis/interpolation/__init__.py +++ b/lib/iris/tests/unit/analysis/interpolation/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.analysis._interpolation` package.""" diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index a91a08dcb8..574a25ee7d 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :class:`iris.analysis._interpolation.RectilinearInterpolator`. diff --git a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py index 54e54bc304..26bc32c69f 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.analysis._interpolation.get_xy_dim_coords`. 
diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py index c259bdeff6..c77f8ede37 100644 --- a/lib/iris/tests/unit/analysis/maths/__init__.py +++ b/lib/iris/tests/unit/analysis/maths/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.analysis.maths` module.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py b/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py index 11664af115..17c0aeac15 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for cube arithmetic with dask arrays.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py index 57e012e1c9..85d1c363e6 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for cube arithmetic involving derived (i.e. factory) coords.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py index e1255ef9d8..c55d19f000 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for cube arithmetic involving MeshCoords.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py index 220b728b32..81cfdee3fb 100644 --- a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.analysis.maths._get_dtype`. 
diff --git a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py index bd81a96fbd..2c97737973 100644 --- a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py +++ b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.analysis.maths._inplace_common_checks`. diff --git a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py index c422e366be..3f69118e0f 100644 --- a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.analysis.maths._output_dtype`. diff --git a/lib/iris/tests/unit/analysis/maths/test_add.py b/lib/iris/tests/unit/analysis/maths/test_add.py index 1ca7f7c244..69078b9a96 100644 --- a/lib/iris/tests/unit/analysis/maths/test_add.py +++ b/lib/iris/tests/unit/analysis/maths/test_add.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.analysis.maths.add` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test_divide.py b/lib/iris/tests/unit/analysis/maths/test_divide.py index 4bd202e037..17e5105126 100644 --- a/lib/iris/tests/unit/analysis/maths/test_divide.py +++ b/lib/iris/tests/unit/analysis/maths/test_divide.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.analysis.maths.divide` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test_multiply.py b/lib/iris/tests/unit/analysis/maths/test_multiply.py index 266342605a..945a86a4d1 100644 --- a/lib/iris/tests/unit/analysis/maths/test_multiply.py +++ b/lib/iris/tests/unit/analysis/maths/test_multiply.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :func:`iris.analysis.maths.multiply` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test_subtract.py b/lib/iris/tests/unit/analysis/maths/test_subtract.py index f7a9df34d0..6812176412 100644 --- a/lib/iris/tests/unit/analysis/maths/test_subtract.py +++ b/lib/iris/tests/unit/analysis/maths/test_subtract.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.analysis.maths.subtract` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/regrid/__init__.py b/lib/iris/tests/unit/analysis/regrid/__init__.py index a0a0fd0a6b..c4e5c119ea 100644 --- a/lib/iris/tests/unit/analysis/regrid/__init__.py +++ b/lib/iris/tests/unit/analysis/regrid/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.analysis._regrid` module.""" diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index a018507fb3..4855b92332 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.analysis._regrid.RectilinearRegridder`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py index 9b0160aee4..16639c1649 100644 --- a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :class:`iris.analysis._regrid.CurvilinearRegridder`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py b/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py index 67218194c2..cd80f89470 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.analysis.scipy_interpolate` module.""" diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py index f0aa027baa..9bf9621fb4 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :func:`iris.analysis._scipy_interpolate._RegularGridInterpolator` class.""" diff --git a/lib/iris/tests/unit/analysis/stats/__init__.py b/lib/iris/tests/unit/analysis/stats/__init__.py index 0b896d648d..8787858158 100644 --- a/lib/iris/tests/unit/analysis/stats/__init__.py +++ b/lib/iris/tests/unit/analysis/stats/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.analysis.stats` module.""" diff --git a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py index 63cf4e2abe..648aeb8a64 100644 --- a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py +++ b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.analysis.stats.pearsonr` function.""" # Import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_Aggregator.py b/lib/iris/tests/unit/analysis/test_Aggregator.py index 45081ad07f..a7029d7d33 100644 --- a/lib/iris/tests/unit/analysis/test_Aggregator.py +++ b/lib/iris/tests/unit/analysis/test_Aggregator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.analysis.Aggregator` class instance.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_AreaWeighted.py b/lib/iris/tests/unit/analysis/test_AreaWeighted.py index 2454e0817c..1e16e4bcb2 100644 --- a/lib/iris/tests/unit/analysis/test_AreaWeighted.py +++ b/lib/iris/tests/unit/analysis/test_AreaWeighted.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.analysis.AreaWeighted`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_COUNT.py b/lib/iris/tests/unit/analysis/test_COUNT.py index 96274f7cd0..fa51565474 100644 --- a/lib/iris/tests/unit/analysis/test_COUNT.py +++ b/lib/iris/tests/unit/analysis/test_COUNT.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :data:`iris.analysis.COUNT` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_Linear.py b/lib/iris/tests/unit/analysis/test_Linear.py index 27565f8c51..e98a6f585e 100644 --- a/lib/iris/tests/unit/analysis/test_Linear.py +++ b/lib/iris/tests/unit/analysis/test_Linear.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.analysis.Linear`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MAX.py b/lib/iris/tests/unit/analysis/test_MAX.py index 91d4daf1f0..8753c5e660 100644 --- a/lib/iris/tests/unit/analysis/test_MAX.py +++ b/lib/iris/tests/unit/analysis/test_MAX.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.MAX` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MAX_RUN.py b/lib/iris/tests/unit/analysis/test_MAX_RUN.py index 00de383f7a..13a940f6fa 100755 --- a/lib/iris/tests/unit/analysis/test_MAX_RUN.py +++ b/lib/iris/tests/unit/analysis/test_MAX_RUN.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.MAX_RUN` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MEAN.py b/lib/iris/tests/unit/analysis/test_MEAN.py index 18e2b4ca6c..9b8ccc1aa7 100644 --- a/lib/iris/tests/unit/analysis/test_MEAN.py +++ b/lib/iris/tests/unit/analysis/test_MEAN.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.MEAN` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MIN.py b/lib/iris/tests/unit/analysis/test_MIN.py index f12790f0f1..06757517d3 100644 --- a/lib/iris/tests/unit/analysis/test_MIN.py +++ b/lib/iris/tests/unit/analysis/test_MIN.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :data:`iris.analysis.MIN` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_Nearest.py b/lib/iris/tests/unit/analysis/test_Nearest.py index f3736d2cf3..053fca1907 100644 --- a/lib/iris/tests/unit/analysis/test_Nearest.py +++ b/lib/iris/tests/unit/analysis/test_Nearest.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.analysis.Nearest`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index a29516c604..d841619ccc 100644 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.PERCENTILE` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_PROPORTION.py b/lib/iris/tests/unit/analysis/test_PROPORTION.py index b7118241af..dc890463ae 100644 --- a/lib/iris/tests/unit/analysis/test_PROPORTION.py +++ b/lib/iris/tests/unit/analysis/test_PROPORTION.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.PROPORTION` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py index f11cd7a8d3..0137a50019 100644 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. diff --git a/lib/iris/tests/unit/analysis/test_PointInCell.py b/lib/iris/tests/unit/analysis/test_PointInCell.py index 2570465245..83453c26d1 100644 --- a/lib/iris/tests/unit/analysis/test_PointInCell.py +++ b/lib/iris/tests/unit/analysis/test_PointInCell.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :class:`iris.analysis.PointInCell`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_RMS.py b/lib/iris/tests/unit/analysis/test_RMS.py index 74f309ce00..f5da089a9c 100644 --- a/lib/iris/tests/unit/analysis/test_RMS.py +++ b/lib/iris/tests/unit/analysis/test_RMS.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.RMS` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_STD_DEV.py b/lib/iris/tests/unit/analysis/test_STD_DEV.py index 978bdb4ddf..0abf4f9dc3 100644 --- a/lib/iris/tests/unit/analysis/test_STD_DEV.py +++ b/lib/iris/tests/unit/analysis/test_STD_DEV.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.STD_DEV` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_SUM.py b/lib/iris/tests/unit/analysis/test_SUM.py index 64699b442f..90be890797 100644 --- a/lib/iris/tests/unit/analysis/test_SUM.py +++ b/lib/iris/tests/unit/analysis/test_SUM.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.SUM` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_VARIANCE.py b/lib/iris/tests/unit/analysis/test_VARIANCE.py index 857bc7e1d2..e4dde970a9 100644 --- a/lib/iris/tests/unit/analysis/test_VARIANCE.py +++ b/lib/iris/tests/unit/analysis/test_VARIANCE.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :data:`iris.analysis.VARIANCE` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py index a59bf4ce9c..c8bcf5018e 100644 --- a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :data:`iris.analysis.PERCENTILE` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py index 0cd808d1c7..a1306063b6 100644 --- a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. diff --git a/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py b/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py index 505a00df78..f4cb94f466 100644 --- a/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py +++ b/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :data:`iris.analysis._axis_to_single_trailing` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/trajectory/__init__.py b/lib/iris/tests/unit/analysis/trajectory/__init__.py index 55d3ebd8bc..8033fd8c30 100644 --- a/lib/iris/tests/unit/analysis/trajectory/__init__.py +++ b/lib/iris/tests/unit/analysis/trajectory/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.analysis.trajectory` module.""" diff --git a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py index 32c41b78db..c8971a897e 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :class:`iris.analysis.trajectory.Trajectory`. 
diff --git a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py index a652ceb72e..f70c3e7518 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :class:`iris.analysis.trajectory.UnstructuredNearestNeigbourRegridder`. diff --git a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py index 8b9e4cafa4..d30feecadd 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py +++ b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :meth:`iris.analysis.trajectory._nearest_neighbour_indices_ndcoords`. 
diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py index f1b9711068..c156354f8f 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :meth:`iris.analysis.trajectory.interpolate`. diff --git a/lib/iris/tests/unit/aux_factory/__init__.py b/lib/iris/tests/unit/aux_factory/__init__.py index 00b9f1a3bd..621625e9da 100644 --- a/lib/iris/tests/unit/aux_factory/__init__.py +++ b/lib/iris/tests/unit/aux_factory/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.aux_factory` module.""" diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py index 6e417a3b38..88da4ca463 100644 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.AtmosphereSigmaFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index f8bd54093f..619a0482b6 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for `iris.aux_factory.AuxCoordFactory`. diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index 48fead3aa5..31f791f10e 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.HybridPressureFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index f588c9f001..4bd85d1e3b 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.OceanSFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 7a2f4c631c..349b4cfcb6 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.OceanSg1Factory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index 4d1f268a1e..3304cf121d 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.OceanSg2Factory` class. 
diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index 30d9647952..a03afa661d 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.OceanSigmaFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index 736a883846..a191fac978 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.aux_factory.OceanSigmaZFactory` class. diff --git a/lib/iris/tests/unit/common/__init__.py b/lib/iris/tests/unit/common/__init__.py index 5380785042..60d8548652 100644 --- a/lib/iris/tests/unit/common/__init__.py +++ b/lib/iris/tests/unit/common/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.common` module.""" diff --git a/lib/iris/tests/unit/common/lenient/__init__.py b/lib/iris/tests/unit/common/lenient/__init__.py index 2a99e7a4c2..e927f5f3ac 100644 --- a/lib/iris/tests/unit/common/lenient/__init__.py +++ b/lib/iris/tests/unit/common/lenient/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.common.lenient` package.""" diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index 62e2b24891..6bcf366a25 100644 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.lenient.Lenient`. diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index 44f38d9c5a..9bff110942 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.lenient._Lenient`. diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py index 3a19563efc..b604e49608 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.lenient._lenient_client`. diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py index 9545b137ea..f6bafde5e7 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_service.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_service.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.lenient._lenient_service`. 
diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py index 3deefbf30d..6e2eb23bc6 100644 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ b/lib/iris/tests/unit/common/lenient/test__qualname.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.lenient._qualname`. diff --git a/lib/iris/tests/unit/common/metadata/__init__.py b/lib/iris/tests/unit/common/metadata/__init__.py index aba33c8312..973234fb21 100644 --- a/lib/iris/tests/unit/common/metadata/__init__.py +++ b/lib/iris/tests/unit/common/metadata/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.common.metadata` package.""" diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py index 9efb43ec42..196ab48d20 100644 --- a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index f4760b3051..e7434922cf 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.metadata.BaseMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py index a434651206..25b287909d 100644 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`. 
diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py index e3b7486012..dac1f26f35 100644 --- a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.metadata.CoordMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 848431565b..382607dca5 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.metadata.CubeMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index 155c4f99b8..1608b1c42e 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. diff --git a/lib/iris/tests/unit/common/metadata/test_hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py index 949002af89..9a16d9252b 100644 --- a/lib/iris/tests/unit/common/metadata/test_hexdigest.py +++ b/lib/iris/tests/unit/common/metadata/test_hexdigest.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.metadata.hexdigest`. diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index 9c5987f235..340b6a5355 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.metadata_filter`. 
diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index cbb29b7161..1bf342004d 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. diff --git a/lib/iris/tests/unit/common/mixin/__init__.py b/lib/iris/tests/unit/common/mixin/__init__.py index 493e140626..3c1f5bbc9d 100644 --- a/lib/iris/tests/unit/common/mixin/__init__.py +++ b/lib/iris/tests/unit/common/mixin/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.common.mixin` package.""" diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 88a88be567..55d2ca5d79 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py index 32c78b6697..7416bb9da5 100644 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. diff --git a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py index 8fc21f2965..634eae4cf3 100644 --- a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py +++ b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. diff --git a/lib/iris/tests/unit/common/resolve/__init__.py b/lib/iris/tests/unit/common/resolve/__init__.py index d0b189e59d..8bfbe20970 100644 --- a/lib/iris/tests/unit/common/resolve/__init__.py +++ b/lib/iris/tests/unit/common/resolve/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.common.resolve` package.""" diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index db1759c5fc..182cbbd61c 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.common.resolve.Resolve`. diff --git a/lib/iris/tests/unit/concatenate/__init__.py b/lib/iris/tests/unit/concatenate/__init__.py index 229476f3a6..6deaf26aa0 100644 --- a/lib/iris/tests/unit/concatenate/__init__.py +++ b/lib/iris/tests/unit/concatenate/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit-test infrastructure for the :mod:`iris._concatenate` package.""" from __future__ import annotations diff --git a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py index 6f29e1f65f..35c3cfd17b 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py +++ b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit-tests for :class:`iris._concatenate._CoordMetaData`.""" from __future__ import annotations diff --git a/lib/iris/tests/unit/concatenate/test__CoordSignature.py b/lib/iris/tests/unit/concatenate/test__CoordSignature.py index eb62c5ec64..c5e4850170 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CoordSignature.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit-tests for :class:`iris._concatenate._CoordSignature`.""" from __future__ import annotations diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index cc20cdfa1f..64a25a2fad 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test class :class:`iris._concatenate._CubeSignature`.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index c2ca01f781..96932e11d4 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._concatenate.concatenate.py`.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/config/__init__.py b/lib/iris/tests/unit/config/__init__.py index 38806c7db8..07805d4bd7 100644 --- a/lib/iris/tests/unit/config/__init__.py +++ b/lib/iris/tests/unit/config/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.config` module.""" diff --git a/lib/iris/tests/unit/config/test_NetCDF.py b/lib/iris/tests/unit/config/test_NetCDF.py index c7f7564e4e..5b691a1dc3 100644 --- a/lib/iris/tests/unit/config/test_NetCDF.py +++ b/lib/iris/tests/unit/config/test_NetCDF.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.config.NetCDF` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/constraints/__init__.py b/lib/iris/tests/unit/constraints/__init__.py index 03a987b1a1..987e88c6e7 100644 --- a/lib/iris/tests/unit/constraints/__init__.py +++ b/lib/iris/tests/unit/constraints/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris._constraints` module.""" diff --git a/lib/iris/tests/unit/constraints/test_Constraint_equality.py b/lib/iris/tests/unit/constraints/test_Constraint_equality.py index 01e61b70a7..6e0b37c3f4 100644 --- a/lib/iris/tests/unit/constraints/test_Constraint_equality.py +++ b/lib/iris/tests/unit/constraints/test_Constraint_equality.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Unit tests for equality testing of different constraint types.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/constraints/test_NameConstraint.py b/lib/iris/tests/unit/constraints/test_NameConstraint.py index 46aea25331..b959b82434 100644 --- a/lib/iris/tests/unit/constraints/test_NameConstraint.py +++ b/lib/iris/tests/unit/constraints/test_NameConstraint.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris._constraints.NameConstraint` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_categorisation/__init__.py b/lib/iris/tests/unit/coord_categorisation/__init__.py index 18fe8f2482..9c60613915 100644 --- a/lib/iris/tests/unit/coord_categorisation/__init__.py +++ b/lib/iris/tests/unit/coord_categorisation/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.coord_categorisation` module.""" diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py index 0c20f16f5a..2291e677bc 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.coord_categorisation.add_categorised_coord`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index 418ac72557..caf52e9c84 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test coordinate categorisation function add_hour. 
""" diff --git a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py index fbc3514147..6560f65a32 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py +++ b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test the coordinate categorisation functions. """ diff --git a/lib/iris/tests/unit/coord_systems/__init__.py b/lib/iris/tests/unit/coord_systems/__init__.py index 39d4d25f73..21f703eed0 100644 --- a/lib/iris/tests/unit/coord_systems/__init__.py +++ b/lib/iris/tests/unit/coord_systems/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.coord_systems` module.""" diff --git a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py index 99a7c9f59b..26aa79ac47 100644 --- a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class. diff --git a/lib/iris/tests/unit/coord_systems/test_GeogCS.py b/lib/iris/tests/unit/coord_systems/test_GeogCS.py index f3f9531dbb..acb9029b1e 100644 --- a/lib/iris/tests/unit/coord_systems/test_GeogCS.py +++ b/lib/iris/tests/unit/coord_systems/test_GeogCS.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coord_systems.GeogCS` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_Geostationary.py b/lib/iris/tests/unit/coord_systems/test_Geostationary.py index cc3c8384db..f144dca190 100644 --- a/lib/iris/tests/unit/coord_systems/test_Geostationary.py +++ b/lib/iris/tests/unit/coord_systems/test_Geostationary.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coord_systems.Geostationary` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py index 971ee06293..b2d0c576bb 100644 --- a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class. diff --git a/lib/iris/tests/unit/coord_systems/test_LambertConformal.py b/lib/iris/tests/unit/coord_systems/test_LambertConformal.py index 7ba89208b1..a9bf70ef94 100644 --- a/lib/iris/tests/unit/coord_systems/test_LambertConformal.py +++ b/lib/iris/tests/unit/coord_systems/test_LambertConformal.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coord_systems.LambertConformal` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py index ba04c77d57..dd2f42bb2f 100644 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ b/lib/iris/tests/unit/coord_systems/test_Mercator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coord_systems.Mercator` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py index 0799fb881e..b17c1cc788 100644 --- a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coord_systems.ObliqueMercator` class.""" from typing import List, NamedTuple diff --git a/lib/iris/tests/unit/coord_systems/test_Orthographic.py b/lib/iris/tests/unit/coord_systems/test_Orthographic.py index ffcbecf55c..a2b63ad5fe 100644 --- a/lib/iris/tests/unit/coord_systems/test_Orthographic.py +++ b/lib/iris/tests/unit/coord_systems/test_Orthographic.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coord_systems.Orthographic` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py index 25f5d24800..16f3ef2e7d 100755 --- a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py +++ b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coord_systems.PolarStereographic` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py b/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py index 97921efec6..01a0640d3b 100644 --- a/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coord_systems.RotatedMercator` class.""" import pytest diff --git a/lib/iris/tests/unit/coord_systems/test_RotatedPole.py b/lib/iris/tests/unit/coord_systems/test_RotatedPole.py index dbb7a05bca..69408a96bc 100644 --- a/lib/iris/tests/unit/coord_systems/test_RotatedPole.py +++ b/lib/iris/tests/unit/coord_systems/test_RotatedPole.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coord_systems.RotatedPole` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_Stereographic.py b/lib/iris/tests/unit/coord_systems/test_Stereographic.py index acd77112c1..cdc2fee581 100644 --- a/lib/iris/tests/unit/coord_systems/test_Stereographic.py +++ b/lib/iris/tests/unit/coord_systems/test_Stereographic.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coord_systems.Stereographic` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py b/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py index 95b80333c2..526985e20d 100644 --- a/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coord_systems.TransverseMercator` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py index 56498e40fa..4cd5f215a9 100644 --- a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py +++ b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coord_systems.VerticalPerspective` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/__init__.py b/lib/iris/tests/unit/coords/__init__.py index 10cee9db8b..a99795d4da 100644 --- a/lib/iris/tests/unit/coords/__init__.py +++ b/lib/iris/tests/unit/coords/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :mod:`iris.coords` module. diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index e5fc8fd28a..0177bcafc4 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coords.AncillaryVariable` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py index e5147659fc..31bd54eb12 100644 --- a/lib/iris/tests/unit/coords/test_AuxCoord.py +++ b/lib/iris/tests/unit/coords/test_AuxCoord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.coords.AuxCoord` class. diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index 2408ec9f36..b34ffdfb91 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coords.Cell` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index 0bd66c6e98..c667e012ef 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coords.CellMeasure` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py index 21b309a32b..274606510a 100644 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.coords.CellMethod`. """ diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index c548d017f2..1c9c3cce2d 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.coords.Coord` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py index dd0ba48f3d..2c8ab3a7ba 100644 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ b/lib/iris/tests/unit/coords/test_DimCoord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.coords.DimCoord` class. diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 83fcbc4512..91a50a9a1c 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.coords._DimensionalMetadata` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/cube/__init__.py b/lib/iris/tests/unit/cube/__init__.py index 7852593e21..8c72b2af8c 100644 --- a/lib/iris/tests/unit/cube/__init__.py +++ b/lib/iris/tests/unit/cube/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.cube` module.""" diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 443c9db546..b1eed4743e 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.Cube` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 86457d3888..386df39b66 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.CubeList` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index c591e45f63..854a0d431a 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.Cube` class aggregated_by method.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/cube/test_Cube__operators.py b/lib/iris/tests/unit/cube/test_Cube__operators.py index e860c57636..0afd5a9d70 100644 --- a/lib/iris/tests/unit/cube/test_Cube__operators.py +++ b/lib/iris/tests/unit/cube/test_Cube__operators.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.cube.Cube` class operators.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/data_manager/__init__.py b/lib/iris/tests/unit/data_manager/__init__.py index 41dcc0adf3..1a2ebdc944 100644 --- a/lib/iris/tests/unit/data_manager/__init__.py +++ b/lib/iris/tests/unit/data_manager/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris._data_manager` module.""" diff --git a/lib/iris/tests/unit/data_manager/test_DataManager.py b/lib/iris/tests/unit/data_manager/test_DataManager.py index e73714730f..1b91e256f4 100644 --- a/lib/iris/tests/unit/data_manager/test_DataManager.py +++ b/lib/iris/tests/unit/data_manager/test_DataManager.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris._data_manager.DataManager`. diff --git a/lib/iris/tests/unit/experimental/__init__.py b/lib/iris/tests/unit/experimental/__init__.py index 438827bab2..38af9c7a97 100644 --- a/lib/iris/tests/unit/experimental/__init__.py +++ b/lib/iris/tests/unit/experimental/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental` package.""" diff --git a/lib/iris/tests/unit/experimental/raster/__init__.py b/lib/iris/tests/unit/experimental/raster/__init__.py index 5f85d810c9..408926e2d9 100644 --- a/lib/iris/tests/unit/experimental/raster/__init__.py +++ b/lib/iris/tests/unit/experimental/raster/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.raster` module.""" diff --git a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py index a3b68ef761..af726c0fa9 100644 --- a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py +++ b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.experimental.raster.export_geotiff` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/regrid/__init__.py b/lib/iris/tests/unit/experimental/regrid/__init__.py index 578c15f11c..f001cccada 100644 --- a/lib/iris/tests/unit/experimental/regrid/__init__.py +++ b/lib/iris/tests/unit/experimental/regrid/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.regrid` package.""" diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 5ec3c956b9..3cec1f8569 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`. 
diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py index b0908dd2e4..f9397da219 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.experimental.regrid.regrid_weighted_curvilinear_to_rectilinear`. diff --git a/lib/iris/tests/unit/experimental/representation/__init__.py b/lib/iris/tests/unit/experimental/representation/__init__.py index c856263a5c..764b46ef58 100644 --- a/lib/iris/tests/unit/experimental/representation/__init__.py +++ b/lib/iris/tests/unit/experimental/representation/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.experimental.representation` package.""" diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py index 8dc3cd7849..678eaa5fea 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.CubeRepresentation` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py index e6b1425110..aadb07c882 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.cube.CubeRepresentation` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/stratify/__init__.py b/lib/iris/tests/unit/experimental/stratify/__init__.py index 7218455e76..41663ee7a4 100644 --- a/lib/iris/tests/unit/experimental/stratify/__init__.py +++ b/lib/iris/tests/unit/experimental/stratify/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.stratify` package.""" diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index 6958fa9a2f..a0db398257 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.experimental.stratify.relevel` function. diff --git a/lib/iris/tests/unit/experimental/ugrid/__init__.py b/lib/iris/tests/unit/experimental/ugrid/__init__.py index 7f55678f06..27d7921e5f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.ugrid` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py b/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py index 2e70f2cd5d..19507555c7 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.cf` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index 641b6b7b44..22914215b7 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable` class. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index 5a68a8c03f..5fae20e6fc 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridConnectivityVariable` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py index a3a0e665bb..1e707d9550 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index 8302c30177..59d3a8aad9 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridMeshVariable` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py index d9de814b05..52eb569b43 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/__init__.py b/lib/iris/tests/unit/experimental/ugrid/load/__init__.py index 36c9108dc2..3248db6e41 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.load` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py index 1203633297..5c33b27d3e 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.load.ParseUgridOnLoad` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py index 4de11d5610..8dab48ae9c 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index 310e68248a..1ec3e65a97 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function. diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py b/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py index 4ce979d845..d485782c11 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.mesh` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 7e90555801..7251597006 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.experimental.ugrid.mesh.Connectivity` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 64d106f97d..6784bb6e50 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`mesh` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index b95c5f09bb..a023762d10 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index edd34f94a1..2581bf106a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`. diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py b/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py index 2d2d040c1d..a8ad2bc014 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.metadata` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index af92e69b08..3b8e5ded9f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`. diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py index 5c96fb7856..0786c52934 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`. diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py index a8b25dc2e7..ba7199b777 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py b/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py index 135d7ee49c..ea8202f8fb 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.utils` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py index 4face700ad..a3cd91815f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`. diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index fa31283c87..4c0bca25c1 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats` package.""" import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/fileformats/abf/__init__.py b/lib/iris/tests/unit/fileformats/abf/__init__.py index aaddf427c5..5ddf017c42 100644 --- a/lib/iris/tests/unit/fileformats/abf/__init__.py +++ b/lib/iris/tests/unit/fileformats/abf/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.abf` module.""" diff --git a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py index 98db52d3e9..b67e02ec06 100644 --- a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py +++ b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.fileformats.abf.ABFField` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/cf/__init__.py b/lib/iris/tests/unit/fileformats/cf/__init__.py index 1bff79368b..6bc562f922 100644 --- a/lib/iris/tests/unit/fileformats/cf/__init__.py +++ b/lib/iris/tests/unit/fileformats/cf/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.cf` module.""" diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py index bfc2d586ef..48f383d7f7 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformats.cf.CFGroup` class.""" from unittest.mock import MagicMock diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 9e5cf9b7a5..4829d03dbb 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.fileformats.cf.CFReader` class. diff --git a/lib/iris/tests/unit/fileformats/dot/__init__.py b/lib/iris/tests/unit/fileformats/dot/__init__.py index 0dbc3ad4c6..afbfed17d8 100644 --- a/lib/iris/tests/unit/fileformats/dot/__init__.py +++ b/lib/iris/tests/unit/fileformats/dot/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :mod:`iris.fileformats.dot`.""" diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py index 1111e8bc83..a29eb625d0 100644 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.dot._dot_path`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/__init__.py b/lib/iris/tests/unit/fileformats/ff/__init__.py index 4d13a18520..945b4f46b5 100644 --- a/lib/iris/tests/unit/fileformats/ff/__init__.py +++ b/lib/iris/tests/unit/fileformats/ff/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.ff` module.""" diff --git a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py index d37b854405..5e731632c6 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.ArakawaC`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py index 696dacd672..98bc42ddf3 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.ENDGame`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index 16943c0c15..15bb61e230 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformat.ff.FF2PP` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py index 72d522ec85..cbbc81dd4b 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :class:`iris.fileformat.ff.FFHeader`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_Grid.py b/lib/iris/tests/unit/fileformats/ff/test_Grid.py index b20c85b9a8..1bb9688c1a 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_Grid.py +++ b/lib/iris/tests/unit/fileformats/ff/test_Grid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.Grid`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py index 5f0d64da71..f3cc41aa82 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py +++ b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :class:`iris.fileformat.ff.NewDynamics`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/name_loaders/__init__.py b/lib/iris/tests/unit/fileformats/name_loaders/__init__.py index 751801a176..e9af5168b8 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/__init__.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.name_loaders` package.""" diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index 624837c19d..2ebde5782f 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`. 
diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py index 5954823c54..e71a31f10f 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py index c4cbde8c14..20ef79cec3 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`. 
diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py index 078f65d572..ea09d40acb 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.analysis.name_loaders._cf_height_from_name` function. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py index d50a7fdad1..2eea25a26d 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py index 2ea22c420b..a29f504b7e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules` . diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 38882810d2..efb5e55be8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 99a1b66ae4..906ba33f9c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index d962fc2758..582ab7e200 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index dfa862c4d1..a8c7d2cc5f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index ffe00c8c19..a1a93056cb 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 59ffa30684..c27d2445e9 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py index e6508bea85..127ebbf68b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules.engine` . diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index df5fbd4922..994d2958c2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index 69a536b9ae..62bc3a6c9f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index c040d43ca0..b6e9ba954c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_albers_equal_area_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py index 87070e00ba..2d1010166f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index 369f92f238..fd500b4831 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_auxilliary_coordinate`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py index d0421186b4..ee66f8b267 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py index a13fa6cca0..e2297be69e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers\ build_cube_metadata`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index bc13975441..4f19d44a2a 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_dimension_coordinate`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index 28b3d8ab9a..a3efcb0dc4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_geostationary_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index 05185a4cf5..8d5b46c6bb 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_lambert_azimuthal_equal_area_coordinate_system`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index 22bb7149b1..9232e146cb 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_lambert_conformal_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index ab61d3b1b2..4958eccbfd 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_mercator_coordinate_system`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py index b11d8d3cca..c377cf7d1b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py index 09cfde9d5b..a20443005c 100755 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_polar_stereographic_coordinate_system`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index 3796aeebab..a483390e36 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_sterographic_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index 0096c5df4b..ae881259fe 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_transverse_mercator_coordinate_system`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index f34992c2be..21906ba644 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_vertical_perspective_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index a159ef81a8..d80b33f002 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_attr_units`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index ff9c51f40b..7d0dc4952c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_cf_bounds_var`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index 3c7c496b54..b6a0f3d3c1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_names`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index bb94adc72e..9dc31ba490 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ has_supported_mercator_parameters`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index 6e6d6e4e81..faffefd8f2 100755 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ has_supported_polar_stereographic_parameters`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index 9935a6e5ae..a58413d399 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 1ee0cfbf2e..1e9d13110e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ reorder_bounds_data`. 
diff --git a/lib/iris/tests/unit/fileformats/netcdf/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/__init__.py index 732094f67a..961f7779a8 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.netcdf` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py index 7c2ae96158..67d3fe0fc6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.netcdf.loader` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index 6c487d74e7..3c3cbff7f4 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.netcdf._get_cf_var_data` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index c15c8737fd..01c6838241 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.netcdf._load_aux_factory` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py index b67c546aa0..25beca8f59 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py index 77bb0d3950..2522392c21 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.netcdf._translate_constraints_to_var_callback`. diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 1a2ef1d29d..77c1da2d1c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.fileformats.netcdf.load_cubes` function. 
diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py index a68d5fc5d0..53d42128f6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.netcdf.saver` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 8253e59368..e4d8488a76 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index e1211dc276..2e7091c43b 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Mirror of :mod:`iris.tests.unit.fileformats.netcdf.test_Saver`, but with lazy arrays.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index da5f2d88fa..10c5dbecf4 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 27d9709fe6..8177e0c299 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :class:`iris.fileformats.netcdf.Saver` class. 
diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index 95a518e4e5..9068837b2c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py index 317f75bb8c..42119094a7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. """ diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index 68049b57fc..620bc64461 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py index 429ee9ce1f..928c9d9361 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.nimrod_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index a15337f849..c15a721ad3 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.fileformats.nimrod_load_rules.units` function. 
diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py index 2279bcffc3..4f1b948839 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.fileformats.nimrod_load_rules.vertical_coord` function. diff --git a/lib/iris/tests/unit/fileformats/pp/__init__.py b/lib/iris/tests/unit/fileformats/pp/__init__.py index f309b6848a..7eedc830d9 100644 --- a/lib/iris/tests/unit/fileformats/pp/__init__.py +++ b/lib/iris/tests/unit/fileformats/pp/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.pp` module.""" diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py index d70e573296..4d963e7f08 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.PPDataProxy` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index f2bbf97a80..1a49f57712 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.PPField` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py index 514e326393..cca9bb4641 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py +++ b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.fileformats.pp.load` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py index 16d2b500a5..ab80332186 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py +++ b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._create_field_data` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 73913c6219..45635af391 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function. 
diff --git a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py index 31ac4f6b19..80b90fc8d2 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py +++ b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._field_gen` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py index 0b83cade76..aa03c068ce 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py +++ b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._interpret_field` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py index 3ff228e106..213eb6c9c4 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.as_fields` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_load.py b/lib/iris/tests/unit/fileformats/pp/test_load.py index 77da1288c2..e802b36c0e 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_load.py +++ b/lib/iris/tests/unit/fileformats/pp/test_load.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index 8200259cca..fc0535f428 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.fileformats.pp.save` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py index fdd470cb47..2eaebc0059 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.save_fields` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py index cdd3c9cd49..5ab3f7c480 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.fileformats.pp.save_pairs_from_cube` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py index 70d28f7c09..c8361feae4 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.pp_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index e194e240c6..591bfda857 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index c9c4821e0a..c87e199956 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._collapse_degenerate_points_and_bounds`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py index d3046ee63e..803e47227f 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_pseudo_level_coords`. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py index 759a399dad..6159a1dbd4 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_scalar_realization_coords`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index cf147e5928..5afaeee45d 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_time_coords`. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py index 47552a646a..a7ed6355f6 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_vertical_coords`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py index 7769ca1de1..176d0a38a1 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.pp_load_rules._dim_or_aux`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py index 2c5d672e14..2724d45871 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._epoch_date_hours`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py index fa381b91c1..c99de5bc34 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py index fc30f66f7f..a33128f39b 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._reduce_points_and_bounds`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py index 4e6d50fea7..d12a718e98 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._reshape_vector_args`. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py index 569d676183..0b46d11f9d 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.pp_load_rules.convert`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/rules/__init__.py b/lib/iris/tests/unit/fileformats/rules/__init__.py index 55c9c7779e..1b14a8b07b 100644 --- a/lib/iris/tests/unit/fileformats/rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/rules/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/rules/test_Loader.py b/lib/iris/tests/unit/fileformats/rules/test_Loader.py index be96f526d2..b99d1e6f40 100644 --- a/lib/iris/tests/unit/fileformats/rules/test_Loader.py +++ b/lib/iris/tests/unit/fileformats/rules/test_Loader.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformats.rules.Loader`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py index b6c4528399..91862658e5 100644 --- a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py +++ b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.rules._make_cube`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py index c703284fc0..8a0a9a38d7 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :mod:`iris.fileformats._structured_array_identification` module. 
diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index 871aab4f1e..9f5466afaa 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :mod:`iris.fileformats._structured_array_identification.ArrayStructure` class. diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index a7818ad802..ec98664f51 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :mod:`iris.fileformats._structured_array_identification.GroupStructure` class. 
diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/test_rules.py index c243a374cb..b7e17b205e 100644 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ b/lib/iris/tests/unit/fileformats/test_rules.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test iris.fileformats.rules.py - metadata translation rules. diff --git a/lib/iris/tests/unit/fileformats/um/__init__.py b/lib/iris/tests/unit/fileformats/um/__init__.py index 6b4abc61bb..6652c6d543 100644 --- a/lib/iris/tests/unit/fileformats/um/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.um` package.""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py index b5eb259e5b..f2c18b5f8a 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" Unit tests for the module :mod:`iris.fileformats.um._fast_load`. diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py index 0c15e5e839..930050813f 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the class :class:`iris.fileformats.um._fast_load.FieldCollation`. diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py index 90c411b41d..f6e3fd5928 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.um._fast_load._convert_collation`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py index f0932c3ac8..c26382aca9 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the module :mod:`iris.fileformats.um._fast_load_structured_fields`. diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py index 57100c79af..19c64ec57a 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the class :class:`iris.fileformats.um._fast_load_structured_fields.BasicFieldCollation`. 
diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py index b7ef9a62a3..75b54dfd4f 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:\ `iris.fileformats.um._fast_load_structured_fields.group_structured_fields`. diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py index 8070719de8..5a72973519 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the module :mod:`iris.fileformats.um._optimal_array_structuring`. 
diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index 96566f3c80..92a8b19ec0 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.fileformats.um._optimal_array_structuring.optimal_array_structure`. diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py index ef6369f638..40ac6826d3 100644 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the function :func:`iris.fileformats.um.um_to_pp`. diff --git a/lib/iris/tests/unit/io/__init__.py b/lib/iris/tests/unit/io/__init__.py index 5e347c9ebc..1a11fe5d30 100644 --- a/lib/iris/tests/unit/io/__init__.py +++ b/lib/iris/tests/unit/io/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.io` package.""" diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py b/lib/iris/tests/unit/io/test__generate_cubes.py index 3a896a111c..96d790db2d 100755 --- a/lib/iris/tests/unit/io/test__generate_cubes.py +++ b/lib/iris/tests/unit/io/test__generate_cubes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.io._generate_cubes` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/io/test_expand_filespecs.py b/lib/iris/tests/unit/io/test_expand_filespecs.py index 8720478153..bd5e5933a3 100644 --- a/lib/iris/tests/unit/io/test_expand_filespecs.py +++ b/lib/iris/tests/unit/io/test_expand_filespecs.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.io.expand_filespecs` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/io/test_run_callback.py b/lib/iris/tests/unit/io/test_run_callback.py index 94ae7ac09d..cd55743b29 100644 --- a/lib/iris/tests/unit/io/test_run_callback.py +++ b/lib/iris/tests/unit/io/test_run_callback.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.io.run_callback` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py index 623cf417f2..cec125d0fe 100755 --- a/lib/iris/tests/unit/io/test_save.py +++ b/lib/iris/tests/unit/io/test_save.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.io.save` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/__init__.py b/lib/iris/tests/unit/lazy_data/__init__.py index b463897c50..55920077f3 100644 --- a/lib/iris/tests/unit/lazy_data/__init__.py +++ b/lib/iris/tests/unit/lazy_data/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris._lazy_data` module.""" diff --git a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py index 1a98c81fac..91b22a3c0e 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.as_concrete_data`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 5f9dece153..0acb085830 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test the function :func:`iris._lazy data.as_lazy_data`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py index 0c10d69c16..3b265d615d 100644 --- a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py +++ b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.co_realise_cubes`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py index 45b3194f32..a8018c67b1 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris._lazy data.is_lazy_data`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py index 4d627a706b..6466ab0ea2 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.is_lazy_masked_data`.""" import dask.array as da diff --git a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py index 49fd6ad70b..651a774c4d 100644 --- a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py +++ b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris._lazy data.lazy_elementwise`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py index 66c03d04c8..1c694d292b 100644 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.map_complete_blocks`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py index 9fe79a0d4c..993cb01178 100644 --- a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py +++ b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris._lazy data.multidim_lazy_stack`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_non_lazy.py b/lib/iris/tests/unit/lazy_data/test_non_lazy.py index cc4ed33ea3..3c6bb99e0a 100644 --- a/lib/iris/tests/unit/lazy_data/test_non_lazy.py +++ b/lib/iris/tests/unit/lazy_data/test_non_lazy.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.non_lazy`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/merge/__init__.py b/lib/iris/tests/unit/merge/__init__.py index c3ead61576..14ef96573f 100644 --- a/lib/iris/tests/unit/merge/__init__.py +++ b/lib/iris/tests/unit/merge/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris._merge` module.""" diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py index 0fca726b28..80f135e108 100644 --- a/lib/iris/tests/unit/merge/test_ProtoCube.py +++ b/lib/iris/tests/unit/merge/test_ProtoCube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris._merge.ProtoCube` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/pandas/__init__.py b/lib/iris/tests/unit/pandas/__init__.py index 103a264839..2ee1fb1cfe 100644 --- a/lib/iris/tests/unit/pandas/__init__.py +++ b/lib/iris/tests/unit/pandas/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.pandas` module.""" diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index d74d7cad9c..6f617439db 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """All unit tests for the :mod:`iris.pandas` module.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/__init__.py b/lib/iris/tests/unit/plot/__init__.py index f589a29e0d..7481cdafa3 100644 --- a/lib/iris/tests/unit/plot/__init__.py +++ b/lib/iris/tests/unit/plot/__init__.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.plot` module.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/_blockplot_common.py b/lib/iris/tests/unit/plot/_blockplot_common.py index 455b416164..e3e88304fa 100644 --- a/lib/iris/tests/unit/plot/_blockplot_common.py +++ b/lib/iris/tests/unit/plot/_blockplot_common.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Common test code for `iris.plot.pcolor` and `iris.plot.pcolormesh`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py index 4dfc6d7f68..3e25f0aadb 100644 --- a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py +++ b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.plot._check_bounds_contiguity_and_mask` function.""" diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py index a06a437396..cfbb15cdef 100644 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._check_geostationary_coords_and_convert function.""" diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py index 0abef01e41..d155f30969 100644 --- a/lib/iris/tests/unit/plot/test__fixup_dates.py +++ b/lib/iris/tests/unit/plot/test__fixup_dates.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._fixup_dates` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn.py b/lib/iris/tests/unit/plot/test__get_plot_defn.py index c69173dc70..512dc7f0b2 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._get_plot_defn` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py index 631f9bd24e..dcd8fac9e1 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._get_plot_defn_custom_coords_picked` function.""" diff --git a/lib/iris/tests/unit/plot/test__get_plot_objects.py b/lib/iris/tests/unit/plot/test__get_plot_objects.py index 8586faa756..fbccbe94fb 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_objects.py +++ b/lib/iris/tests/unit/plot/test__get_plot_objects.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.plot._get_plot_objects` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py index 8e2d4f226b..8d4054b35a 100644 --- a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py +++ b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.__replace_axes_with_cartopy_axes` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_contour.py b/lib/iris/tests/unit/plot/test_contour.py index 823b3270d0..43c0564ff4 100644 --- a/lib/iris/tests/unit/plot/test_contour.py +++ b/lib/iris/tests/unit/plot/test_contour.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.plot.contour` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_contourf.py b/lib/iris/tests/unit/plot/test_contourf.py index de84e88a52..64ab87f879 100644 --- a/lib/iris/tests/unit/plot/test_contourf.py +++ b/lib/iris/tests/unit/plot/test_contourf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.contourf` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_hist.py b/lib/iris/tests/unit/plot/test_hist.py index 8a74ff8701..feef8f1062 100644 --- a/lib/iris/tests/unit/plot/test_hist.py +++ b/lib/iris/tests/unit/plot/test_hist.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.hist` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/plot/test_outline.py b/lib/iris/tests/unit/plot/test_outline.py index de59287362..dc1b27487b 100644 --- a/lib/iris/tests/unit/plot/test_outline.py +++ b/lib/iris/tests/unit/plot/test_outline.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.outline` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_pcolor.py b/lib/iris/tests/unit/plot/test_pcolor.py index 1cde9e8822..219df4d446 100644 --- a/lib/iris/tests/unit/plot/test_pcolor.py +++ b/lib/iris/tests/unit/plot/test_pcolor.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.pcolor` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_pcolormesh.py b/lib/iris/tests/unit/plot/test_pcolormesh.py index f4e84e5765..a5525770f2 100644 --- a/lib/iris/tests/unit/plot/test_pcolormesh.py +++ b/lib/iris/tests/unit/plot/test_pcolormesh.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.plot.pcolormesh` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index 1ed2da1b13..db33862a7e 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.plot` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_points.py b/lib/iris/tests/unit/plot/test_points.py index e1a23eff83..0d713e3d84 100644 --- a/lib/iris/tests/unit/plot/test_points.py +++ b/lib/iris/tests/unit/plot/test_points.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.points` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_scatter.py b/lib/iris/tests/unit/plot/test_scatter.py index c5cd9cb2f2..f3b2ec1592 100644 --- a/lib/iris/tests/unit/plot/test_scatter.py +++ b/lib/iris/tests/unit/plot/test_scatter.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.scatter` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/__init__.py b/lib/iris/tests/unit/quickplot/__init__.py index 471ef0f6a5..1ce65d9647 100644 --- a/lib/iris/tests/unit/quickplot/__init__.py +++ b/lib/iris/tests/unit/quickplot/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.quickplot` module.""" diff --git a/lib/iris/tests/unit/quickplot/test_contour.py b/lib/iris/tests/unit/quickplot/test_contour.py index 8e3db7c3e0..2f3bb1a45d 100644 --- a/lib/iris/tests/unit/quickplot/test_contour.py +++ b/lib/iris/tests/unit/quickplot/test_contour.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.quickplot.contour` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_contourf.py b/lib/iris/tests/unit/quickplot/test_contourf.py index e510e661ae..55c9940821 100644 --- a/lib/iris/tests/unit/quickplot/test_contourf.py +++ b/lib/iris/tests/unit/quickplot/test_contourf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.contourf` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_outline.py b/lib/iris/tests/unit/quickplot/test_outline.py index 70d96372fa..4dd924b749 100644 --- a/lib/iris/tests/unit/quickplot/test_outline.py +++ b/lib/iris/tests/unit/quickplot/test_outline.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.outline` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_pcolor.py b/lib/iris/tests/unit/quickplot/test_pcolor.py index 2e559d6308..79f6904e12 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolor.py +++ b/lib/iris/tests/unit/quickplot/test_pcolor.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.pcolor` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_pcolormesh.py b/lib/iris/tests/unit/quickplot/test_pcolormesh.py index 32ae3ed716..826f0e7121 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolormesh.py +++ b/lib/iris/tests/unit/quickplot/test_pcolormesh.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.pcolormesh` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_plot.py b/lib/iris/tests/unit/quickplot/test_plot.py index 0a36a3fa4e..35e1eae470 100644 --- a/lib/iris/tests/unit/quickplot/test_plot.py +++ b/lib/iris/tests/unit/quickplot/test_plot.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.quickplot.plot` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_points.py b/lib/iris/tests/unit/quickplot/test_points.py index 3810cdd343..b28c37bf87 100644 --- a/lib/iris/tests/unit/quickplot/test_points.py +++ b/lib/iris/tests/unit/quickplot/test_points.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.points` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_scatter.py b/lib/iris/tests/unit/quickplot/test_scatter.py index c1cf853970..db3e9948a0 100644 --- a/lib/iris/tests/unit/quickplot/test_scatter.py +++ b/lib/iris/tests/unit/quickplot/test_scatter.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.scatter` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/representation/__init__.py b/lib/iris/tests/unit/representation/__init__.py index e943ad149b..19824735c1 100644 --- a/lib/iris/tests/unit/representation/__init__.py +++ b/lib/iris/tests/unit/representation/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris._representation` module.""" diff --git a/lib/iris/tests/unit/representation/cube_printout/__init__.py b/lib/iris/tests/unit/representation/cube_printout/__init__.py index 50ab3f8e45..15e84606db 100644 --- a/lib/iris/tests/unit/representation/cube_printout/__init__.py +++ b/lib/iris/tests/unit/representation/cube_printout/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris._representation.cube_printout` module.""" diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py index 65fb115243..20d5c47e01 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :class:`iris._representation.cube_printout.CubePrintout`.""" import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/representation/cube_printout/test_Table.py b/lib/iris/tests/unit/representation/cube_printout/test_Table.py index e5dba52c61..ff9b6cf51b 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_Table.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_Table.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris._representation.cube_printout.Table`.""" from iris._representation.cube_printout import Table import iris.tests as tests diff --git a/lib/iris/tests/unit/representation/cube_summary/__init__.py b/lib/iris/tests/unit/representation/cube_summary/__init__.py index c20a621ba2..684221e6d2 100644 --- a/lib/iris/tests/unit/representation/cube_summary/__init__.py +++ b/lib/iris/tests/unit/representation/cube_summary/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris._representation.cube_summary` module.""" diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index d81f680df5..1280c3b38f 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris._representation.cube_summary.CubeSummary`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/test_Future.py b/lib/iris/tests/unit/test_Future.py index f0c161b0c4..00f6b82519 100644 --- a/lib/iris/tests/unit/test_Future.py +++ b/lib/iris/tests/unit/test_Future.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.Future` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/test_sample_data_path.py b/lib/iris/tests/unit/test_sample_data_path.py index ebf3b8108c..aff2c1088f 100644 --- a/lib/iris/tests/unit/test_sample_data_path.py +++ b/lib/iris/tests/unit/test_sample_data_path.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.sample_data_path` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/tests/__init__.py b/lib/iris/tests/unit/tests/__init__.py index b8d27d34d3..b0c801b816 100644 --- a/lib/iris/tests/unit/tests/__init__.py +++ b/lib/iris/tests/unit/tests/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.tests` package.""" diff --git a/lib/iris/tests/unit/tests/stock/__init__.py b/lib/iris/tests/unit/tests/stock/__init__.py index f91390c2b3..ad31134ad4 100644 --- a/lib/iris/tests/unit/tests/stock/__init__.py +++ b/lib/iris/tests/unit/tests/stock/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.tests.stock` module.""" diff --git a/lib/iris/tests/unit/tests/stock/test_netcdf.py b/lib/iris/tests/unit/tests/stock/test_netcdf.py index 54d7b895cc..eb1c289c37 100644 --- a/lib/iris/tests/unit/tests/stock/test_netcdf.py +++ b/lib/iris/tests/unit/tests/stock/test_netcdf.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.tests.stock.netcdf` module.""" import shutil diff --git a/lib/iris/tests/unit/tests/test_IrisTest.py b/lib/iris/tests/unit/tests/test_IrisTest.py index 10de2a7760..ef895e45b6 100644 --- a/lib/iris/tests/unit/tests/test_IrisTest.py +++ b/lib/iris/tests/unit/tests/test_IrisTest.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.tests.IrisTest` class.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/time/__init__.py b/lib/iris/tests/unit/time/__init__.py index 3483b92e62..fdbb082434 100644 --- a/lib/iris/tests/unit/time/__init__.py +++ b/lib/iris/tests/unit/time/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.time` module.""" diff --git a/lib/iris/tests/unit/time/test_PartialDateTime.py b/lib/iris/tests/unit/time/test_PartialDateTime.py index cfffafea2c..8223f4a518 100644 --- a/lib/iris/tests/unit/time/test_PartialDateTime.py +++ b/lib/iris/tests/unit/time/test_PartialDateTime.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.time.PartialDateTime` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/__init__.py b/lib/iris/tests/unit/util/__init__.py index 9aed566a19..ce94a18f4e 100644 --- a/lib/iris/tests/unit/util/__init__.py +++ b/lib/iris/tests/unit/util/__init__.py @@ -1,6 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.util` module.""" diff --git a/lib/iris/tests/unit/util/test__coord_regular.py b/lib/iris/tests/unit/util/test__coord_regular.py index a5e9aca9ed..bd9f8f3430 100644 --- a/lib/iris/tests/unit/util/test__coord_regular.py +++ b/lib/iris/tests/unit/util/test__coord_regular.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test elements of :mod:`iris.util` that deal with checking coord regularity. Specifically, this module tests the following functions: diff --git a/lib/iris/tests/unit/util/test__is_circular.py b/lib/iris/tests/unit/util/test__is_circular.py index e67eb38294..67099f49d6 100644 --- a/lib/iris/tests/unit/util/test__is_circular.py +++ b/lib/iris/tests/unit/util/test__is_circular.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util._is_circular`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test__mask_array.py b/lib/iris/tests/unit/util/test__mask_array.py index 91a5aca1b4..2245576de9 100644 --- a/lib/iris/tests/unit/util/test__mask_array.py +++ b/lib/iris/tests/unit/util/test__mask_array.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util._mask_array""" import dask.array as da diff --git a/lib/iris/tests/unit/util/test__slice_data_with_keys.py b/lib/iris/tests/unit/util/test__slice_data_with_keys.py index 061a2f5b37..9c93041521 100644 --- a/lib/iris/tests/unit/util/test__slice_data_with_keys.py +++ b/lib/iris/tests/unit/util/test__slice_data_with_keys.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.util._slice_data_with_keys`. diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py index 77631907a1..38b9652443 100644 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ b/lib/iris/tests/unit/util/test_array_equal.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.array_equal`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py index 3df1634ba5..c060967edf 100644 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ b/lib/iris/tests/unit/util/test_broadcast_to_shape.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
+# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.broadcast_to_shape`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_column_slices_generator.py b/lib/iris/tests/unit/util/test_column_slices_generator.py index 899c6b98ba..fbb5a8f588 100644 --- a/lib/iris/tests/unit/util/test_column_slices_generator.py +++ b/lib/iris/tests/unit/util/test_column_slices_generator.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.column_slices_generator`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py index ec8f9904f1..65e3dec93b 100644 --- a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py +++ b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.demote_dim_coord_to_aux_coord`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_describe_diff.py b/lib/iris/tests/unit/util/test_describe_diff.py index 0bb13cab94..74bd71389e 100644 --- a/lib/iris/tests/unit/util/test_describe_diff.py +++ b/lib/iris/tests/unit/util/test_describe_diff.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.describe_diff`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index 13aa1e2af4..a4198160a9 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Unit tests for the :func:`iris.util.equalise_attributes` function. diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index cff878a294..c27f4f1dcb 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Test function :func:`iris.util.test_file_is_newer`. diff --git a/lib/iris/tests/unit/util/test_find_discontiguities.py b/lib/iris/tests/unit/util/test_find_discontiguities.py index 9e043c71bd..6965541320 100644 --- a/lib/iris/tests/unit/util/test_find_discontiguities.py +++ b/lib/iris/tests/unit/util/test_find_discontiguities.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.find_discontiguities""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py index 0123d0cca5..7237f0491c 100644 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ b/lib/iris/tests/unit/util/test_mask_cube.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.mask_cube""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index a6374f97ad..197c06e449 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.new_axis`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py index 0e1e56fee5..8ad9cbf4c2 100644 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py +++ b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.promote_aux_coord_to_dim_coord`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py index 7d9a669a9d..b6da468e7f 100644 --- a/lib/iris/tests/unit/util/test_reverse.py +++ b/lib/iris/tests/unit/util/test_reverse.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.reverse`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py index 3644da9c9c..533e5d5633 100644 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ b/lib/iris/tests/unit/util/test_rolling_window.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.rolling_window`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_squeeze.py b/lib/iris/tests/unit/util/test_squeeze.py index b5f0a91b99..cb4b55c1e6 100644 --- a/lib/iris/tests/unit/util/test_squeeze.py +++ b/lib/iris/tests/unit/util/test_squeeze.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.squeeze`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py index 8bee046dad..2d7a3b6d64 100644 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ b/lib/iris/tests/unit/util/test_unify_time_units.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.array_equal`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/time.py b/lib/iris/time.py index 51aac3d46d..ddedeedd91 100644 --- a/lib/iris/time.py +++ b/lib/iris/time.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Time handling. diff --git a/lib/iris/util.py b/lib/iris/util.py index c040b72b54..ee415d230e 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ Miscellaneous utility functions. 
diff --git a/pyproject.toml b/pyproject.toml index 4f9ade1351..88b39f1601 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ authors = [ classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", - "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", + "License :: OSI Approved :: BSD License", "Operating System :: MacOS", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", @@ -48,7 +48,7 @@ keywords = [ "ugrid", "visualisation", ] -license = {text = "LGPL-3.0-or-later"} +license = {text = "BSD-3-Clause"} name = "scitools-iris" requires-python = ">=3.9" @@ -59,7 +59,7 @@ Documentation = "https://scitools-iris.readthedocs.io/en/stable/" Issues = "https://github.com/SciTools/iris/issues" [tool.setuptools] -license-files = ["COPYING", "COPYING.LESSER"] +license-files = ["LICENSE"] zip-safe = false [tool.setuptools.dynamic] diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 51a31ef971..8e3b24aac6 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A script to convert the standard names information from the provided XML file into a Python dictionary format. @@ -27,9 +26,8 @@ STD_VALUES_FILE_TEMPLATE = ''' # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
""" This file contains a dictionary of standard value names that are mapped to another dictionary of other standard name attributes. Currently only diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index afe12a662d..bd38f0f733 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -1,9 +1,8 @@ #!/usr/bin/env python3 # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A do-nothing script to hand-hold through the Iris release process. diff --git a/tools/update_lockfiles.py b/tools/update_lockfiles.py index 073f86cda6..a81ab8cafc 100755 --- a/tools/update_lockfiles.py +++ b/tools/update_lockfiles.py @@ -1,8 +1,7 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. """ A command line utility for generating conda-lock files for the environments that nox uses for testing each different supported version of python. 
From 80c179251219eebdfd03e72b0df123c2cfca61fd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 19:06:12 +0000 Subject: [PATCH 082/134] [pre-commit.ci] pre-commit autoupdate (#5579) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.10.1 → 23.11.0](https://github.com/psf/black/compare/23.10.1...23.11.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5aefc1da76..cbad42b83a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/psf/black - rev: 23.10.1 + rev: 23.11.0 hooks: - id: black pass_filenames: false From 777051848c175df8013c593e85c65978d7d09875 Mon Sep 17 00:00:00 2001 From: Henry Wright <84939917+HGWright@users.noreply.github.com> Date: Fri, 17 Nov 2023 16:19:30 +0000 Subject: [PATCH 083/134] Allowing exemption to axis guessing on coords (#5551) * allowing excemption to axis guessing on coords * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * updating pr * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * remove from metadata * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * remove merge clash * adding review comments * more review changes * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * parametrise and add tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix last test * addressing review comments * [pre-commit.ci] auto fixes from pre-commit.com 
hooks for more information, see https://pre-commit.ci * fix test failure * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * add whatsnew and conftest files * fix sentence * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix flake8 * fix last test * update whatsnew --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 6 ++- lib/iris/coords.py | 38 ++++++++++++-- lib/iris/tests/unit/conftest.py | 14 ++++++ lib/iris/tests/unit/coords/test_Coord.py | 34 +++++++++++++ .../tests/unit/util/test_guess_coord_axis.py | 50 +++++++++++++++++++ lib/iris/util.py | 9 +++- 6 files changed, 146 insertions(+), 5 deletions(-) create mode 100644 lib/iris/tests/unit/conftest.py create mode 100644 lib/iris/tests/unit/util/test_guess_coord_axis.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 6e7087c687..0b57a75cd7 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -29,7 +29,7 @@ This document explains the changes made to Iris for this release ✨ Features =========== - + #. `@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' :class:`UserWarning`\s for richer filtering. The full index of sub-categories can be seen here: :mod:`iris.exceptions` . (:pull:`5498`) @@ -44,6 +44,10 @@ This document explains the changes made to Iris for this release Winter - December to February) will be assigned to the preceding year (e.g. the year of December) instead of the following year (the default behaviour). (:pull:`5573`) + + #. `@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual + intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a + coordinate. 
(:pull:`5551`) 🐛 Bugs Fixed diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 30de08d496..8af7ee0c8a 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -36,6 +36,9 @@ import iris.time import iris.util +#: The default value for ignore_axis which controls guess_coord_axis' behaviour +DEFAULT_IGNORE_AXIS = False + class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta): """ @@ -860,7 +863,6 @@ def xml_element(self, doc): element.setAttribute( "climatological", str(self.climatological) ) - if self.attributes: attributes_element = doc.createElement("attributes") for name in sorted(self.attributes.keys()): @@ -1593,6 +1595,8 @@ def __init__( self.bounds = bounds self.climatological = climatological + self._ignore_axis = DEFAULT_IGNORE_AXIS + def copy(self, points=None, bounds=None): """ Returns a copy of this coordinate. @@ -1625,6 +1629,10 @@ def copy(self, points=None, bounds=None): # self. new_coord.bounds = bounds + # The state of ignore_axis is controlled by the coordinate rather than + # the metadata manager + new_coord.ignore_axis = self.ignore_axis + return new_coord @classmethod @@ -1644,7 +1652,14 @@ def from_coord(cls, coord): if issubclass(cls, DimCoord): # DimCoord introduces an extra constructor keyword. kwargs["circular"] = getattr(coord, "circular", False) - return cls(**kwargs) + + new_coord = cls(**kwargs) + + # The state of ignore_axis is controlled by the coordinate rather than + # the metadata manager + new_coord.ignore_axis = coord.ignore_axis + + return new_coord @property def points(self): @@ -1736,6 +1751,24 @@ def climatological(self, value): self._metadata_manager.climatological = value + @property + def ignore_axis(self): + """ + A boolean that controls whether guess_coord_axis acts on this + coordinate. + + Defaults to False, and when set to True it will be skipped by + guess_coord_axis. 
+ """ + return self._ignore_axis + + @ignore_axis.setter + def ignore_axis(self, value): + if not isinstance(value, bool): + emsg = "'ignore_axis' can only be set to 'True' or 'False'" + raise ValueError(emsg) + self._ignore_axis = value + def lazy_points(self): """ Return a lazy array representing the coord points. @@ -2694,7 +2727,6 @@ def __init__( Will set to True when a climatological time axis is loaded from NetCDF. Always False if no bounds exist. - """ # Configure the metadata manager. self._metadata_manager = metadata_manager_factory(DimCoordMetadata) diff --git a/lib/iris/tests/unit/conftest.py b/lib/iris/tests/unit/conftest.py new file mode 100644 index 0000000000..a4ddb89294 --- /dev/null +++ b/lib/iris/tests/unit/conftest.py @@ -0,0 +1,14 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Unit tests fixture infra-structure.""" +import pytest + +import iris + + +@pytest.fixture +def sample_coord(): + sample_coord = iris.coords.DimCoord(points=(1, 2, 3, 4, 5)) + return sample_coord diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 1c9c3cce2d..14dcdf7ca0 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -14,6 +14,7 @@ import dask.array as da import numpy as np +import pytest import iris from iris.coords import AuxCoord, Coord, DimCoord @@ -1149,6 +1150,39 @@ def test_change_units(self): self.assertFalse(coord.climatological) +class TestIgnoreAxis: + def test_default(self, sample_coord): + assert sample_coord.ignore_axis is False + + def test_set_true(self, sample_coord): + sample_coord.ignore_axis = True + assert sample_coord.ignore_axis is True + + def test_set_random_value(self, sample_coord): + with pytest.raises( + ValueError, + match=r"'ignore_axis' can only be set to 'True' or 'False'", + ): + 
sample_coord.ignore_axis = "foo" + + @pytest.mark.parametrize( + "ignore_axis, copy_or_from, result", + [ + (True, "copy", True), + (True, "from_coord", True), + (False, "copy", False), + (False, "from_coord", False), + ], + ) + def test_copy_coord(self, ignore_axis, copy_or_from, result, sample_coord): + sample_coord.ignore_axis = ignore_axis + if copy_or_from == "copy": + new_coord = sample_coord.copy() + elif copy_or_from == "from_coord": + new_coord = sample_coord.from_coord(sample_coord) + assert new_coord.ignore_axis is result + + class Test___init____abstractmethod(tests.IrisTest): def test(self): emsg = ( diff --git a/lib/iris/tests/unit/util/test_guess_coord_axis.py b/lib/iris/tests/unit/util/test_guess_coord_axis.py new file mode 100644 index 0000000000..d946565196 --- /dev/null +++ b/lib/iris/tests/unit/util/test_guess_coord_axis.py @@ -0,0 +1,50 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Test function :func:`iris.util.guess_coord_axis`.""" + +import pytest + +from iris.util import guess_coord_axis + + +class TestGuessCoord: + @pytest.mark.parametrize( + "coordinate, axis", + [ + ("longitude", "X"), + ("grid_longitude", "X"), + ("projection_x_coordinate", "X"), + ("latitude", "Y"), + ("grid_latitude", "Y"), + ("projection_y_coordinate", "Y"), + ], + ) + def test_coord(self, coordinate, axis, sample_coord): + sample_coord.standard_name = coordinate + assert guess_coord_axis(sample_coord) == axis + + @pytest.mark.parametrize( + "units, axis", + [ + ("hPa", "Z"), + ("days since 1970-01-01 00:00:00", "T"), + ], + ) + def test_units(self, units, axis, sample_coord): + sample_coord.units = units + assert guess_coord_axis(sample_coord) == axis + + @pytest.mark.parametrize( + "ignore_axis, result", + [ + (True, None), + (False, "X"), + ], + ) + def test_ignore_axis(self, ignore_axis, result, sample_coord): + sample_coord.standard_name = "longitude" + sample_coord.ignore_axis = ignore_axis + + assert guess_coord_axis(sample_coord) == result diff --git a/lib/iris/util.py b/lib/iris/util.py index ee415d230e..4509f2885b 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -257,10 +257,17 @@ def guess_coord_axis(coord): This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. + The ``guess_coord_axis`` behaviour can be skipped by setting the coordinate property ``ignore_axis`` + to ``False``. 
+ """ + axis = None - if coord.standard_name in ( + if hasattr(coord, "ignore_axis") and coord.ignore_axis is True: + return axis + + elif coord.standard_name in ( "longitude", "grid_longitude", "projection_x_coordinate", From 20393cc4a6569080548bf484df93f9b989beceab Mon Sep 17 00:00:00 2001 From: stephenworsley <49274989+stephenworsley@users.noreply.github.com> Date: Mon, 20 Nov 2023 16:54:42 +0000 Subject: [PATCH 084/134] Refactor area weighted regridding, improve performance (#5543) * refactor area weighted regridding * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix test failures * remove old code, fix tests * fix tests, make masking more robust * fix tests, maintain dtype behaviour * fix tests, remove old code * fix tests * fix out of bounds and circularity handling * fix test * add test * add documentation, avoid unnecessary regrid calls * remove unnecessary code, improve coverage * add docstrings * minor fixes * change dimension ordering to match curvilinear regridding * make x-y ordering more consistent with existing implementations * add documentation, tidy code * add documentation, reset test * add documentation * address review comments --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- lib/iris/analysis/_area_weighted.py | 1041 +++++------------ ..._area_weighted_rectilinear_src_and_grid.py | 58 + .../test_AreaWeightedRegridder.py | 2 +- 3 files changed, 364 insertions(+), 737 deletions(-) diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index ffec82fd4e..bd2ad90a3a 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -7,6 +7,7 @@ import cf_units import numpy as np import numpy.ma as ma +from scipy.sparse import csr_array from iris._lazy_data import map_complete_blocks from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid @@ -75,8 +76,7 @@ def 
__init__(self, src_grid_cube, target_grid_cube, mdtol=1): self.grid_y, self.meshgrid_x, self.meshgrid_y, - self.weights_info, - self.index_info, + self.weights, ) = _regrid_info def __call__(self, cube): @@ -125,8 +125,7 @@ def __call__(self, cube): self.grid_y, self.meshgrid_x, self.meshgrid_y, - self.weights_info, - self.index_info, + self.weights, ) return _regrid_area_weighted_rectilinear_src_and_grid__perform( cube, _regrid_info, mdtol=self._mdtol @@ -224,468 +223,17 @@ def _get_xy_coords(cube): return x_coord, y_coord -def _within_bounds(src_bounds, tgt_bounds, orderswap=False): - """ - Determine which target bounds lie within the extremes of the source bounds. - - Args: - - * src_bounds (ndarray): - An (n, 2) shaped array of monotonic contiguous source bounds. - * tgt_bounds (ndarray): - An (n, 2) shaped array corresponding to the target bounds. - - Kwargs: - - * orderswap (bool): - A Boolean indicating whether the target bounds are in descending order - (True). Defaults to False. - - Returns: - Boolean ndarray, indicating whether each target bound is within the - extremes of the source bounds. - - """ - min_bound = np.min(src_bounds) - 1e-14 - max_bound = np.max(src_bounds) + 1e-14 - - # Swap upper-lower is necessary. - if orderswap is True: - upper, lower = tgt_bounds.T - else: - lower, upper = tgt_bounds.T - - return ((lower <= max_bound) * (lower >= min_bound)) * ( - (upper <= max_bound) * (upper >= min_bound) - ) - - -def _cropped_bounds(bounds, lower, upper): - """ - Return a new bounds array and corresponding slice object (or indices) of - the original data array, resulting from cropping the provided bounds - between the specified lower and upper values. The bounds at the - extremities will be truncated so that they start and end with lower and - upper. - - This function will return an empty NumPy array and slice if there is no - overlap between the region covered by bounds and the region from lower to - upper. 
- - If lower > upper the resulting bounds may not be contiguous and the - indices object will be a tuple of indices rather than a slice object. - - Args: - - * bounds: - An (n, 2) shaped array of monotonic contiguous bounds. - * lower: - Lower bound at which to crop the bounds array. - * upper: - Upper bound at which to crop the bounds array. - - Returns: - A tuple of the new bounds array and the corresponding slice object or - indices from the zeroth axis of the original array. - - """ - reversed_flag = False - # Ensure order is increasing. - if bounds[0, 0] > bounds[-1, 0]: - # Reverse bounds - bounds = bounds[::-1, ::-1] - reversed_flag = True - - # Number of bounds. - n = bounds.shape[0] - - if lower <= upper: - if lower > bounds[-1, 1] or upper < bounds[0, 0]: - new_bounds = bounds[0:0] - indices = slice(0, 0) - else: - # A single region lower->upper. - if lower < bounds[0, 0]: - # Region extends below bounds so use first lower bound. - lindex = 0 - lower = bounds[0, 0] - else: - # Index of last lower bound less than or equal to lower. - lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] - if upper > bounds[-1, 1]: - # Region extends above bounds so use last upper bound. - uindex = n - 1 - upper = bounds[-1, 1] - else: - # Index of first upper bound greater than or equal to - # upper. - uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] - # Extract the bounds in our region defined by lower->upper. - new_bounds = np.copy(bounds[lindex : (uindex + 1), :]) - # Replace first and last values with specified bounds. - new_bounds[0, 0] = lower - new_bounds[-1, 1] = upper - if reversed_flag: - indices = slice(n - (uindex + 1), n - lindex) - else: - indices = slice(lindex, uindex + 1) - else: - # Two regions [0]->upper, lower->[-1] - # [0]->upper - if upper < bounds[0, 0]: - # Region outside src bounds. - new_bounds_left = bounds[0:0] - indices_left = tuple() - slice_left = slice(0, 0) - else: - if upper > bounds[-1, 1]: - # Whole of bounds. 
- uindex = n - 1 - upper = bounds[-1, 1] - else: - # Index of first upper bound greater than or equal to upper. - uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] - # Extract the bounds in our region defined by [0]->upper. - new_bounds_left = np.copy(bounds[0 : (uindex + 1), :]) - # Replace last value with specified bound. - new_bounds_left[-1, 1] = upper - if reversed_flag: - indices_left = tuple(range(n - (uindex + 1), n)) - slice_left = slice(n - (uindex + 1), n) - else: - indices_left = tuple(range(0, uindex + 1)) - slice_left = slice(0, uindex + 1) - # lower->[-1] - if lower > bounds[-1, 1]: - # Region is outside src bounds. - new_bounds_right = bounds[0:0] - indices_right = tuple() - slice_right = slice(0, 0) - else: - if lower < bounds[0, 0]: - # Whole of bounds. - lindex = 0 - lower = bounds[0, 0] - else: - # Index of last lower bound less than or equal to lower. - lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] - # Extract the bounds in our region defined by lower->[-1]. - new_bounds_right = np.copy(bounds[lindex:, :]) - # Replace first value with specified bound. - new_bounds_right[0, 0] = lower - if reversed_flag: - indices_right = tuple(range(0, n - lindex)) - slice_right = slice(0, n - lindex) - else: - indices_right = tuple(range(lindex, n)) - slice_right = slice(lindex, None) - - if reversed_flag: - # Flip everything around. - indices_left, indices_right = indices_right, indices_left - slice_left, slice_right = slice_right, slice_left - - # Combine regions. - new_bounds = np.concatenate((new_bounds_left, new_bounds_right)) - # Use slices if possible, but if we have two regions use indices. 
- if indices_left and indices_right: - indices = indices_left + indices_right - elif indices_left: - indices = slice_left - elif indices_right: - indices = slice_right - else: - indices = slice(0, 0) - - if reversed_flag: - new_bounds = new_bounds[::-1, ::-1] - - return new_bounds, indices - - -def _cartesian_area(y_bounds, x_bounds): - """ - Return an array of the areas of each cell given two arrays - of cartesian bounds. - - Args: - - * y_bounds: - An (n, 2) shaped NumPy array. - * x_bounds: - An (m, 2) shaped NumPy array. - - Returns: - An (n, m) shaped Numpy array of areas. - - """ - heights = y_bounds[:, 1] - y_bounds[:, 0] - widths = x_bounds[:, 1] - x_bounds[:, 0] - return np.abs(np.outer(heights, widths)) - - -def _spherical_area(y_bounds, x_bounds, radius=1.0): +def _get_bounds_in_units(coord, units, dtype): """ - Return an array of the areas of each cell on a sphere - given two arrays of latitude and longitude bounds in radians. - - Args: - - * y_bounds: - An (n, 2) shaped NumPy array of latitude bounds in radians. - * x_bounds: - An (m, 2) shaped NumPy array of longitude bounds in radians. - * radius: - Radius of the sphere. Default is 1.0. - - Returns: - An (n, m) shaped Numpy array of areas. + Return a copy of coord's bounds in the specified units and dtype. + Return as contiguous bounds. """ - return iris.analysis.cartography._quadrant_area(y_bounds, x_bounds, radius) - - -def _get_bounds_in_units(coord, units, dtype): - """Return a copy of coord's bounds in the specified units and dtype.""" # The bounds are cast to dtype before conversion to prevent issues when # mixing float32 and float64 types. - return coord.units.convert(coord.bounds.astype(dtype), units).astype(dtype) - - -def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0): - """ - Return the weighted mean of an array over the specified axis - using the provided weights (if any) and a permitted fraction of - masked data. - - Args: - - * data (array-like): - Data to be averaged. 
- - * weights (array-like): - An array of the same shape as the data that specifies the contribution - of each corresponding data element to the calculated mean. - - Kwargs: - - * axis (int or tuple of ints): - Axis along which the mean is computed. The default is to compute - the mean of the flattened array. - - * mdtol (float): - Tolerance of missing data. The value returned in each element of the - returned array will be masked if the fraction of masked data exceeds - mdtol. This fraction is weighted by the `weights` array if one is - provided. mdtol=0 means no missing data is tolerated - while mdtol=1 will mean the resulting element will be masked if and - only if all the contributing elements of data are masked. - Defaults to 0. - - Returns: - Numpy array (possibly masked) or scalar. - - """ - if ma.is_masked(data): - res, unmasked_weights_sum = ma.average( - data, weights=weights, axis=axis, returned=True - ) - if mdtol < 1: - weights_sum = weights.sum(axis=axis) - frac_masked = 1 - np.true_divide(unmasked_weights_sum, weights_sum) - mask_pt = frac_masked > mdtol - if np.any(mask_pt) and not isinstance(res, ma.core.MaskedConstant): - if np.isscalar(res): - res = ma.masked - elif ma.isMaskedArray(res): - res.mask |= mask_pt - else: - res = ma.masked_array(res, mask=mask_pt) - else: - res = np.average(data, weights=weights, axis=axis) - return res - - -def _regrid_area_weighted_array( - src_data, x_dim, y_dim, weights_info, index_info, mdtol=0 -): - """ - Regrid the given data from its source grid to a new grid using - an area weighted mean to determine the resulting data values. - - .. note:: - - Elements in the returned array that lie either partially - or entirely outside of the extent of the source grid will - be masked irrespective of the value of mdtol. - - Args: - - * src_data: - An N-dimensional NumPy array. - * x_dim: - The X dimension within `src_data`. - * y_dim: - The Y dimension within `src_data`. 
- * weights_info: - The area weights information to be used for area-weighted - regridding. - - Kwargs: - - * mdtol: - Tolerance of missing data. The value returned in each element of the - returned array will be masked if the fraction of missing data exceeds - mdtol. This fraction is calculated based on the area of masked cells - within each target cell. mdtol=0 means no missing data is tolerated - while mdtol=1 will mean the resulting element will be masked if and - only if all the overlapping elements of the source grid are masked. - Defaults to 0. - - Returns: - The regridded data as an N-dimensional NumPy array. The lengths - of the X and Y dimensions will now match those of the target - grid. - - """ - ( - blank_weights, - src_area_weights, - new_data_mask_basis, - ) = weights_info - - ( - result_x_extent, - result_y_extent, - square_data_indices_y, - square_data_indices_x, - src_area_datas_required, - ) = index_info - - # Ensure we have x_dim and y_dim. - x_dim_orig = x_dim - y_dim_orig = y_dim - if y_dim is None: - src_data = np.expand_dims(src_data, axis=src_data.ndim) - y_dim = src_data.ndim - 1 - if x_dim is None: - src_data = np.expand_dims(src_data, axis=src_data.ndim) - x_dim = src_data.ndim - 1 - # Move y_dim and x_dim to last dimensions - if not x_dim == src_data.ndim - 1: - src_data = np.moveaxis(src_data, x_dim, -1) - if not y_dim == src_data.ndim - 2: - if x_dim < y_dim: - # note: y_dim was shifted along by one position when - # x_dim was moved to the last dimension - src_data = np.moveaxis(src_data, y_dim - 1, -2) - elif x_dim > y_dim: - src_data = np.moveaxis(src_data, y_dim, -2) - x_dim = src_data.ndim - 1 - y_dim = src_data.ndim - 2 - - # Create empty "pre-averaging" data array that will enable the - # src_data data corresponding to a given target grid point, - # to be stacked per point. - # Note that dtype is not preserved and that the array mask - # allows for regions that do not overlap. 
- new_shape = list(src_data.shape) - new_shape[x_dim] = result_x_extent - new_shape[y_dim] = result_y_extent - - # Use input cube dtype or convert values to the smallest possible float - # dtype when necessary. - dtype = np.promote_types(src_data.dtype, np.float16) - - # Axes of data over which the weighted mean is calculated. - axis = (y_dim, x_dim) - - # Use previously established indices - - src_area_datas_square = src_data[ - ..., square_data_indices_y, square_data_indices_x - ] - - _, src_area_datas_required = np.broadcast_arrays( - src_area_datas_square, src_area_datas_required - ) - - src_area_datas = np.where( - src_area_datas_required, src_area_datas_square, 0 - ) - - # Flag to indicate whether the original data was a masked array. - src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False - if src_masked: - src_area_masks_square = src_data.mask[ - ..., square_data_indices_y, square_data_indices_x - ] - src_area_masks = np.where( - src_area_datas_required, src_area_masks_square, True - ) - - else: - # If the weights were originally blank, set the weights to all 1 to - # avoid divide by 0 error and set the new data mask for making the - # values 0 - src_area_weights = np.where(blank_weights, 1, src_area_weights) - - new_data_mask = np.broadcast_to(new_data_mask_basis, new_shape) - - # Broadcast the weights array to allow numpy's ma.average - # to be called. - # Assign new shape to raise error on copy. - src_area_weights.shape = src_area_datas.shape[-3:] - # Broadcast weights to match shape of data. - _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights) - - # Mask the data points - if src_masked: - src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks) - - # Calculate weighted mean taking into account missing data. 
- new_data = _weighted_mean_with_mdtol( - src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol - ) - new_data = new_data.reshape(new_shape) - if src_masked: - new_data_mask = new_data.mask - - # Mask the data if originally masked or if the result has masked points - if ma.isMaskedArray(src_data): - new_data = ma.array( - new_data, - mask=new_data_mask, - fill_value=src_data.fill_value, - dtype=dtype, - ) - elif new_data_mask.any(): - new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype) - else: - new_data = new_data.astype(dtype) - - # Restore data to original form - if x_dim_orig is None and y_dim_orig is None: - new_data = np.squeeze(new_data, axis=x_dim) - new_data = np.squeeze(new_data, axis=y_dim) - elif y_dim_orig is None: - new_data = np.squeeze(new_data, axis=y_dim) - new_data = np.moveaxis(new_data, -1, x_dim_orig) - elif x_dim_orig is None: - new_data = np.squeeze(new_data, axis=x_dim) - new_data = np.moveaxis(new_data, -1, y_dim_orig) - elif x_dim_orig < y_dim_orig: - # move the x_dim back first, so that the y_dim will - # then be moved to its original position - new_data = np.moveaxis(new_data, -1, x_dim_orig) - new_data = np.moveaxis(new_data, -1, y_dim_orig) - else: - # move the y_dim back first, so that the x_dim will - # then be moved to its original position - new_data = np.moveaxis(new_data, -2, y_dim_orig) - new_data = np.moveaxis(new_data, -1, x_dim_orig) - - return new_data + return coord.units.convert( + coord.contiguous_bounds().astype(dtype), units + ).astype(dtype) def _regrid_area_weighted_rectilinear_src_and_grid__prepare( @@ -775,290 +323,51 @@ def _regrid_area_weighted_rectilinear_src_and_grid__prepare( # Create 2d meshgrids as required by _create_cube func. meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points) - # Determine whether target grid bounds are decreasing. This must - # be determined prior to wrap_lons being called. 
- grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0] - grid_y_decreasing = grid_y_bounds[-1, 0] < grid_y_bounds[0, 0] - # Wrapping of longitudes. if spherical: - base = np.min(src_x_bounds) modulus = x_units.modulus - # Only wrap if necessary to avoid introducing floating - # point errors. - if np.min(grid_x_bounds) < base or np.max(grid_x_bounds) > ( - base + modulus - ): - grid_x_bounds = iris.analysis.cartography.wrap_lons( - grid_x_bounds, base, modulus - ) - - # Determine whether the src_x coord has periodic boundary conditions. - circular = getattr(src_x, "circular", False) - - # Use simple cartesian area function or one that takes into - # account the curved surface if coord system is spherical. - if spherical: - area_func = _spherical_area else: - area_func = _cartesian_area + modulus = None def _calculate_regrid_area_weighted_weights( src_x_bounds, src_y_bounds, grid_x_bounds, grid_y_bounds, - grid_x_decreasing, - grid_y_decreasing, - area_func, - circular=False, + spherical, + modulus=None, ): - """ - Compute the area weights used for area-weighted regridding. - Args: - * src_x_bounds: - A NumPy array of bounds along the X axis defining the source grid. - * src_y_bounds: - A NumPy array of bounds along the Y axis defining the source grid. - * grid_x_bounds: - A NumPy array of bounds along the X axis defining the new grid. - * grid_y_bounds: - A NumPy array of bounds along the Y axis defining the new grid. - * grid_x_decreasing: - Boolean indicating whether the X coordinate of the new grid is - in descending order. - * grid_y_decreasing: - Boolean indicating whether the Y coordinate of the new grid is - in descending order. - * area_func: - A function that returns an (p, q) array of weights given an (p, 2) - shaped array of Y bounds and an (q, 2) shaped array of X bounds. - Kwargs: - * circular: - A boolean indicating whether the `src_x_bounds` are periodic. - Default is False. 
- Returns: - The area weights to be used for area-weighted regridding. - """ - # Determine which grid bounds are within src extent. - y_within_bounds = _within_bounds( - src_y_bounds, grid_y_bounds, grid_y_decreasing - ) - x_within_bounds = _within_bounds( - src_x_bounds, grid_x_bounds, grid_x_decreasing + """Return weights matrix to be used in regridding.""" + src_shape = (len(src_x_bounds) - 1, len(src_y_bounds) - 1) + tgt_shape = (len(grid_x_bounds) - 1, len(grid_y_bounds) - 1) + + if spherical: + # Changing the dtype here replicates old regridding behaviour. + dtype = np.float64 + src_x_bounds = src_x_bounds.astype(dtype) + src_y_bounds = src_y_bounds.astype(dtype) + grid_x_bounds = grid_x_bounds.astype(dtype) + grid_y_bounds = grid_y_bounds.astype(dtype) + + src_y_bounds = np.sin(src_y_bounds) + grid_y_bounds = np.sin(grid_y_bounds) + x_info = _get_coord_to_coord_matrix_info( + src_x_bounds, grid_x_bounds, circular=spherical, mod=modulus ) - - # Cache which src_bounds are within grid bounds - cached_x_bounds = [] - cached_x_indices = [] - max_x_indices = 0 - for x_0, x_1 in grid_x_bounds: - if grid_x_decreasing: - x_0, x_1 = x_1, x_0 - x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) - cached_x_bounds.append(x_bounds) - cached_x_indices.append(x_indices) - # Keep record of the largest slice - if isinstance(x_indices, slice): - x_indices_size = np.sum(x_indices.stop - x_indices.start) - else: # is tuple of indices - x_indices_size = len(x_indices) - if x_indices_size > max_x_indices: - max_x_indices = x_indices_size - - # Cache which y src_bounds areas and weights are within grid bounds - cached_y_indices = [] - cached_weights = [] - max_y_indices = 0 - for j, (y_0, y_1) in enumerate(grid_y_bounds): - # Reverse lower and upper if dest grid is decreasing. 
- if grid_y_decreasing: - y_0, y_1 = y_1, y_0 - y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1) - cached_y_indices.append(y_indices) - # Keep record of the largest slice - if isinstance(y_indices, slice): - y_indices_size = np.sum(y_indices.stop - y_indices.start) - else: # is tuple of indices - y_indices_size = len(y_indices) - if y_indices_size > max_y_indices: - max_y_indices = y_indices_size - - weights_i = [] - for i, (x_0, x_1) in enumerate(grid_x_bounds): - # Reverse lower and upper if dest grid is decreasing. - if grid_x_decreasing: - x_0, x_1 = x_1, x_0 - x_bounds = cached_x_bounds[i] - x_indices = cached_x_indices[i] - - # Determine whether element i, j overlaps with src and hence - # an area weight should be computed. - # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case - # of wrapped longitudes. However if the src grid is not global - # (i.e. circular) this new cell would include a region outside of - # the extent of the src grid and thus the weight is therefore - # invalid. - outside_extent = x_0 > x_1 and not circular - if ( - outside_extent - or not y_within_bounds[j] - or not x_within_bounds[i] - ): - weights = False - else: - # Calculate weights based on areas of cropped bounds. - if isinstance(x_indices, tuple) and isinstance( - y_indices, tuple - ): - raise RuntimeError( - "Cannot handle split bounds " "in both x and y." 
- ) - weights = area_func(y_bounds, x_bounds) - weights_i.append(weights) - cached_weights.append(weights_i) - return ( - tuple(cached_x_indices), - tuple(cached_y_indices), - max_x_indices, - max_y_indices, - tuple(cached_weights), + y_info = _get_coord_to_coord_matrix_info(src_y_bounds, grid_y_bounds) + weights_matrix = _combine_xy_weights( + x_info, y_info, src_shape, tgt_shape ) + return weights_matrix - ( - cached_x_indices, - cached_y_indices, - max_x_indices, - max_y_indices, - cached_weights, - ) = _calculate_regrid_area_weighted_weights( + weights = _calculate_regrid_area_weighted_weights( src_x_bounds, src_y_bounds, grid_x_bounds, grid_y_bounds, - grid_x_decreasing, - grid_y_decreasing, - area_func, - circular, - ) - - # Go further, calculating the full weights array that we'll need in the - # perform step and the indices we'll need to extract from the cube we're - # regridding (src_data) - - result_y_extent = len(grid_y_bounds) - result_x_extent = len(grid_x_bounds) - - # Total number of points - num_target_pts = result_y_extent * result_x_extent - - # Create empty array to hold weights - src_area_weights = np.zeros( - list((max_y_indices, max_x_indices, num_target_pts)) + spherical, + modulus, ) - - # Built for the case where the source cube isn't masked - blank_weights = np.zeros((num_target_pts,)) - new_data_mask_basis = np.full( - (len(cached_y_indices), len(cached_x_indices)), False, dtype=np.bool_ - ) - - # To permit fancy indexing, we need to store our data in an array whose - # first two dimensions represent the indices needed for the target cell. - # Since target cells can require a different number of indices, the size of - # these dimensions should be the maximum of this number. 
- # This means we need to track whether the data in - # that array is actually required and build those squared-off arrays - # TODO: Consider if a proper mask would be better - src_area_datas_required = np.full( - (max_y_indices, max_x_indices, num_target_pts), False - ) - square_data_indices_y = np.zeros( - (max_y_indices, max_x_indices, num_target_pts), dtype=int - ) - square_data_indices_x = np.zeros( - (max_y_indices, max_x_indices, num_target_pts), dtype=int - ) - - # Stack the weights for each target point and build the indices we'll need - # to extract the src_area_data - target_pt_ji = -1 - for j, y_indices in enumerate(cached_y_indices): - for i, x_indices in enumerate(cached_x_indices): - target_pt_ji += 1 - # Determine whether to mask element i, j based on whether - # there are valid weights. - weights = cached_weights[j][i] - if weights is False: - # Prepare for the src_data not being masked by storing the - # information that will let us fill the data with zeros and - # weights as one. The weighted average result will be the same, - # but we avoid dividing by zero. 
- blank_weights[target_pt_ji] = True - new_data_mask_basis[j, i] = True - else: - # Establish which indices are actually in y_indices and x_indices - if isinstance(y_indices, slice): - y_indices = list( - range( - y_indices.start, - y_indices.stop, - y_indices.step or 1, - ) - ) - else: - y_indices = list(y_indices) - - if isinstance(x_indices, slice): - x_indices = list( - range( - x_indices.start, - x_indices.stop, - x_indices.step or 1, - ) - ) - else: - x_indices = list(x_indices) - - # For the weights, we just need the lengths of these as we're - # dropping them into a pre-made array - - len_y = len(y_indices) - len_x = len(x_indices) - - src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights - - # To build the indices for the source cube, we need equal - # shaped array so we pad with 0s and record the need to mask - # them in src_area_datas_required - padded_y_indices = y_indices + [0] * (max_y_indices - len_y) - padded_x_indices = x_indices + [0] * (max_x_indices - len_x) - - square_data_indices_y[..., target_pt_ji] = np.array( - padded_y_indices - )[:, np.newaxis] - square_data_indices_x[..., target_pt_ji] = padded_x_indices - - src_area_datas_required[0:len_y, 0:len_x, target_pt_ji] = True - - # Package up the return data - - weights_info = ( - blank_weights, - src_area_weights, - new_data_mask_basis, - ) - - index_info = ( - result_x_extent, - result_y_extent, - square_data_indices_y, - square_data_indices_x, - src_area_datas_required, - ) - - # Now return it - return ( src_x, src_y, @@ -1068,8 +377,7 @@ def _calculate_regrid_area_weighted_weights( grid_y, meshgrid_x, meshgrid_y, - weights_info, - index_info, + weights, ) @@ -1091,17 +399,18 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( grid_y, meshgrid_x, meshgrid_y, - weights_info, - index_info, + weights, ) = regrid_info + tgt_shape = (len(grid_y.points), len(grid_x.points)) + # Calculate new data array for regridded cube. 
regrid = functools.partial( - _regrid_area_weighted_array, + _regrid_along_dims, x_dim=src_x_dim, y_dim=src_y_dim, - weights_info=weights_info, - index_info=index_info, + weights=weights, + tgt_shape=tgt_shape, mdtol=mdtol, ) @@ -1120,9 +429,9 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( ) # TODO: investigate if an area weighted callback would be more appropriate. # _regrid_callback = functools.partial( - # _regrid_area_weighted_array, - # weights_info=weights_info, - # index_info=index_info, + # _regrid_along_dims, + # weights=weights, + # tgt_shape=tgt_shape, # mdtol=mdtol, # ) @@ -1149,3 +458,263 @@ def regrid_callback(*args, **kwargs): new_cube = new_cube[tuple(indices)] return new_cube + + +def _get_coord_to_coord_matrix_info( + src_bounds, tgt_bounds, circular=False, mod=None +): + """ + First part of weight calculation. + + Calculate the weights contribution from a single pair of + coordinate bounds. Search for pairs of overlapping source and + target bounds and associate weights with them. + + Note: this assumes that the bounds are monotonic. + """ + # Calculate the number of cells represented by the bounds. + m = len(tgt_bounds) - 1 + n = len(src_bounds) - 1 + + # Ensure bounds are strictly increasing. + src_decreasing = src_bounds[0] > src_bounds[1] + tgt_decreasing = tgt_bounds[0] > tgt_bounds[1] + if src_decreasing: + src_bounds = src_bounds[::-1] + if tgt_decreasing: + tgt_bounds = tgt_bounds[::-1] + + if circular: + # For circular coordinates (e.g. longitude) account for source and + # target bounds which span different ranges (e.g. (-180, 180) vs + # (0, 360)). We ensure that all possible overlaps between source and + # target bounds are accounted for by including two copies of the + # source bounds, shifted appropriately by the modulus. 
+ adjust = (tgt_bounds.min() - src_bounds.min()) // mod + src_bounds = src_bounds + (mod * adjust) + src_bounds = np.append(src_bounds, src_bounds + mod) + nn = (2 * n) + 1 + else: + nn = n + + # Before iterating through pairs of overlapping bounds, find an + # appropriate place to start iteration. Note that this assumes that + # the bounds are increasing. + i = max(np.searchsorted(tgt_bounds, src_bounds[0], side="right") - 1, 0) + j = max(np.searchsorted(src_bounds, tgt_bounds[0], side="right") - 1, 0) + + data = [] + rows = [] + cols = [] + + # Iterate through overlapping cells in the source and target bounds. + # For the sake of calculations, we keep track of the minimum value of + # the intersection of each cell. + floor = max(tgt_bounds[i], src_bounds[j]) + while i < m and j < nn: + # Record the current indices. + rows.append(i) + cols.append(j) + + # Determine the next indices and floor. + if tgt_bounds[i + 1] < src_bounds[j + 1]: + next_floor = tgt_bounds[i + 1] + next_i = i + 1 + elif tgt_bounds[i + 1] == src_bounds[j + 1]: + next_floor = tgt_bounds[i + 1] + next_i = i + 1 + j += 1 + else: + next_floor = src_bounds[j + 1] + next_i = i + j += 1 + + # Calculate and record the weight for the current overlapping cells. + weight = (next_floor - floor) / (tgt_bounds[i + 1] - tgt_bounds[i]) + data.append(weight) + + # Update indices and floor + i = next_i + floor = next_floor + + data = np.array(data) + rows = np.array(rows) + cols = np.array(cols) + + if circular: + # Remove out of bounds points. When the source bounds were duplicated + # an "out of bounds" cell was introduced between the two copies. + oob = np.where(cols == n) + data = np.delete(data, oob) + rows = np.delete(rows, oob) + cols = np.delete(cols, oob) + + # Wrap indices. Since we duplicated the source bounds there may be + # indices which are greater than n which will need to be corrected. + cols = cols % (n + 1) + + # Correct indices which were flipped due to reversing decreasing bounds. 
+ if src_decreasing: + cols = n - cols - 1 + if tgt_decreasing: + rows = m - rows - 1 + + return data, rows, cols + + +def _combine_xy_weights(x_info, y_info, src_shape, tgt_shape): + """ + Second part of weight calculation. + + Combine the weights contributions from both pairs of coordinate + bounds (i.e. the source/target pairs for the x and y coords). + Return the result as a sparse array. + """ + x_src, y_src = src_shape + x_tgt, y_tgt = tgt_shape + src_size = x_src * y_src + tgt_size = x_tgt * y_tgt + x_weight, x_rows, x_cols = x_info + y_weight, y_rows, y_cols = y_info + + # Regridding weights will be applied to a flattened (y, x) array. + # Weights and indices are constructed in a way to account for this. + # Weights of the combined matrix are constructed by broadcasting + # the x_weights and y_weights. The resulting array contains every + # combination of x weight and y weight. Then we flatten this array. + xy_weight = y_weight[:, np.newaxis] * x_weight[np.newaxis, :] + xy_weight = xy_weight.flatten() + + # Given the x index and y index associated with a weight, calculate + # the equivalent index in the flattened (y, x) array. + xy_rows = (y_rows[:, np.newaxis] * x_tgt) + x_rows[np.newaxis, :] + xy_rows = xy_rows.flatten() + xy_cols = (y_cols[:, np.newaxis] * x_src) + x_cols[np.newaxis, :] + xy_cols = xy_cols.flatten() + + # Create a sparse matrix for efficient weight application. + combined_weights = csr_array( + (xy_weight, (xy_rows, xy_cols)), shape=(tgt_size, src_size) + ) + return combined_weights + + +def _standard_regrid_no_masks(data, weights, tgt_shape): + """ + Regrid unmasked data to an unmasked result. + + Assumes that the first two dimensions are the x-y grid. + """ + # Reshape data to a form suitable for matrix multiplication. + extra_shape = data.shape[:-2] + data = data.reshape(-1, np.prod(data.shape[-2:])) + + # Apply regridding weights. + # The order of matrix multiplication is chosen to be consistent + # with existing regridding code. 
+ result = data @ weights.T + + # Reshape result to a suitable form. + result = result.reshape(*(extra_shape + tgt_shape)) + return result + + +def _standard_regrid(data, weights, tgt_shape, mdtol): + """ + Regrid data and handle masks. + + Assumes that the first two dimensions are the x-y grid. + """ + # This is set to keep consistent with legacy behaviour. + # This is likely to become switchable in the future, see: + # https://github.com/SciTools/iris/issues/5461 + oob_invalid = True + + data_shape = data.shape + if ma.is_masked(data): + unmasked = ~ma.getmaskarray(data) + # Calculate contribution from unmasked sources to each target point. + weight_sums = _standard_regrid_no_masks(unmasked, weights, tgt_shape) + else: + # If there are no masked points then all contributions will be + # from unmasked sources, so we can skip this calculation + weight_sums = np.ones(data_shape[:-2] + tgt_shape) + mdtol = max(mdtol, 1e-8) + tgt_mask = weight_sums > 1 - mdtol + # If out of bounds sources are treated the same as masked sources this + # will already have been calculated above, so we can skip this calculation. + if oob_invalid or not ma.is_masked(data): + # Calculate the proportion of each target cell which is covered by the + # source. For the sake of efficiency, this is calculated for a 2D slice + # which is then broadcast. + inbound_sums = _standard_regrid_no_masks( + np.ones(data_shape[-2:]), weights, tgt_shape + ) + if oob_invalid: + # Legacy behaviour, if the full area of a target cell does not lie + # in bounds it will be masked. + oob_mask = inbound_sums > 1 - 1e-8 + else: + # Note: this code is currently inaccessible. This code exists to lay + # the groundwork for future work which will make out of bounds + # behaviour switchable. + oob_mask = inbound_sums > 1 - mdtol + # Broadcast the mask to the shape of the full array + oob_slice = ((np.newaxis,) * len(data.shape[:-2])) + np.s_[:, :] + tgt_mask = tgt_mask * oob_mask[oob_slice] + + # Calculate normalisations. 
+ normalisations = tgt_mask.astype(weight_sums.dtype) + normalisations[tgt_mask] /= weight_sums[tgt_mask] + + # Mask points in the result. + if ma.isMaskedArray(data): + # If the source is masked, the result should have a similar mask. + fill_value = data.fill_value + normalisations = ma.array( + normalisations, mask=~tgt_mask, fill_value=fill_value + ) + elif np.any(~tgt_mask): + normalisations = ma.array(normalisations, mask=~tgt_mask) + + # Use input cube dtype or convert values to the smallest possible float + # dtype when necessary. + dtype = np.promote_types(data.dtype, np.float16) + + # Perform regridding on unmasked data. + result = _standard_regrid_no_masks( + ma.filled(data, 0.0), weights, tgt_shape + ) + # Apply normalisations and masks to the regridded data. + result = result * normalisations + result = result.astype(dtype) + return result + + +def _regrid_along_dims(data, x_dim, y_dim, weights, tgt_shape, mdtol): + """Regrid data, handling masks and dimensions.""" + # Handle scalar coordinates. + # Note: scalar source coordinates are only handled when their + # corresponding target coordinate is also scalar. + num_scalar_dims = 0 + if x_dim is None: + num_scalar_dims += 1 + data = np.expand_dims(data, -1) + x_dim = -1 + if y_dim is None: + num_scalar_dims += 1 + data = np.expand_dims(data, -1) + y_dim = -1 + if num_scalar_dims == 2: + y_dim = -2 + + # Standard regridding expects the last two dimensions to belong + # to the y and x coordinate and will output as such. + # Axes are moved to account for an arbitrary dimension ordering. 
+ data = np.moveaxis(data, [y_dim, x_dim], [-2, -1]) + result = _standard_regrid(data, weights, tgt_shape, mdtol) + result = np.moveaxis(result, [-2, -1], [y_dim, x_dim]) + + for _ in range(num_scalar_dims): + result = np.squeeze(result, axis=-1) + return result diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 93b1a6d3e6..9190548b15 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -601,6 +601,20 @@ def test_circular_subset(self): @tests.skip_data def test_non_circular_subset(self): + """ + Test regridding behaviour when the source grid has circular latitude. + + This tests the specific case when the longitude coordinate of the + source grid has the `circular` attribute as `False` but otherwise spans + the full 360 degrees. + + Note: the previous behaviour was to always mask target cells when they + spanned the boundary of max/min longitude and `circular` was `False`, + however this has been changed so that such cells will only be masked + when there is a gap between max longitude and min longitude. In this + test these cells are expected to be unmasked and therefore the result + will be equal to the above test for circular longitudes. + """ src = iris.tests.stock.global_pp() src.coord("latitude").guess_bounds() src.coord("longitude").guess_bounds() @@ -619,9 +633,53 @@ def test_non_circular_subset(self): dest.add_dim_coord(dest_lat, 0) dest.add_dim_coord(dest_lon, 1) + res = regrid_area_weighted(src, dest) + self.assertArrayShapeStats(res, (40, 7), 285.653960, 15.212710) + + @tests.skip_data + def test__proper_non_circular_subset(self): + """ + Test regridding behaviour when the source grid has circular latitude. 
+ + This tests the specific case when the longitude coordinate of the + source grid does not span the full 360 degrees. Target cells which span + the boundary of max/min longitude will contain a section which is out + of bounds from the source grid and are therefore expected to be masked. + """ + src = iris.tests.stock.global_pp() + src.coord("latitude").guess_bounds() + src.coord("longitude").guess_bounds() + src_lon_bounds = src.coord("longitude").bounds.copy() + # Leave a small gap between the first and last longitude value. + src_lon_bounds[0, 0] += 0.001 + src_lon = src.coord("longitude").copy( + points=src.coord("longitude").points, bounds=src_lon_bounds + ) + src.remove_coord("longitude") + src.add_dim_coord(src_lon, 1) + dest_lat = src.coord("latitude")[0:40] + dest_lon = iris.coords.DimCoord( + [-15.0, -10.0, -5.0, 0.0, 5.0, 10.0, 15.0], + standard_name="longitude", + units="degrees", + coord_system=dest_lat.coord_system, + ) + # Note target grid (in -180 to 180) src in 0 to 360 + dest_lon.guess_bounds() + data = np.zeros((dest_lat.shape[0], dest_lon.shape[0])) + dest = iris.cube.Cube(data) + dest.add_dim_coord(dest_lat, 0) + dest.add_dim_coord(dest_lon, 1) + res = regrid_area_weighted(src, dest) self.assertArrayShapeStats(res, (40, 7), 285.550814, 15.190245) + # The target cells straddling the gap between min and max source + # longitude should be masked. 
+ expected_mask = np.zeros(res.shape) + expected_mask[:, 3] = 1 + assert np.array_equal(expected_mask, res.data.mask) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index 2d873ad011..789426e11b 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -50,7 +50,7 @@ def check_mdtol(self, mdtol=None): _regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare( src_grid, target_grid ) - self.assertEqual(len(_regrid_info), 10) + self.assertEqual(len(_regrid_info), 9) with mock.patch( "iris.analysis._area_weighted." "_regrid_area_weighted_rectilinear_src_and_grid__prepare", From 96a69e443520c372291a539f75e83edeb7a585dd Mon Sep 17 00:00:00 2001 From: stephenworsley <49274989+stephenworsley@users.noreply.github.com> Date: Tue, 21 Nov 2023 09:56:43 +0000 Subject: [PATCH 085/134] add whatsnew (#5596) --- docs/src/whatsnew/latest.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 0b57a75cd7..bad64fccc4 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -72,7 +72,8 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. N/A +#. `@stephenworsley`_ improved the speed of :class:`~iris.analysis.AreaWeighted` + regridding. (:pull:`5543`) 🔥 Deprecations From 5b93be6e1ff0a75bcb5efa801c7ac45370f0548b Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 21 Nov 2023 14:50:59 +0000 Subject: [PATCH 086/134] Mergeback of "Feature _split_attrs" branch (#5152) * Split-attrs: Cube metadata refactortests (#4993) * Convert Test___eq__ to pytest. * Convert Test_combine to pytest. * Convert Test_difference to pytest. * Review changes. 
* Split attrs - tests for status quo (#4960) * Tests for attribute handling in netcdf load/save. * Tidy test functions. * Fix import order exception. * Add cf-global attributes test. * Towards more pytest-y implemenation. * Replace 'create_testcase' with fixture which also handles temporary directory. * Much tidy; use fixtures to parametrise over multiple attributes. * Fix warnings; begin data-style attrs tests. * Tests for data-style attributes. * Simplify setup fixture + improve docstring. * No parallel test runner, to avoid error for Python>3.8. * Fixed for new-style netcdf module. * Small review changes. * Rename attributes set 'data-style' as 'local-style'. * Simplify use of fixtures; clarify docstrings/comments and improve argument names. * Clarify testing sections for different attribute 'styles'. * Re-enable parallel testing. * Sorted params to avoid parallel testing bug - pytest#432. * Rename test functions to make alpha-order match order in class. * Split netcdf load/save attribute testing into separate sourcefile. * Add tests for loaded cube attributes; refactor to share code between Load and Roundtrip tests. * Add tests for attribute saving. * Fix method names in comments. * Clarify source of Conventions attributes. * Explain the test numbering in TestRoundtrip/TestLoad. * Remove obsolete test helper method. * Fix small typo; Fix numbering of testcases in TestSave. * Implement split cube attributes. (#5040) * Implement split cube attributes. * Test fixes. * Modify examples for simpler metadata printouts. * Added tests, small behaviour fixes. * Simplify copy. * Fix doctests. * Skip doctests with non-replicable outputs (from use of sets). * Tidy test comments, and add extra test. * Tiny typo. * Remove redundant redefinition of Cube.attributes. * Add CubeAttrsDict in module __all__ + improve docs coverage. * Review changes - small test changes. * More review changes. 
* [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fix CubeAttrsDict example docstrings. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Odd small fixes. * Improved docstrings and comments; fix doctests. * Don't sidestep netcdf4 thread-safety. * Publicise LimitedAttributeDict, so CubeAttrsDict can refer to it. * Fix various internal + external links. * Update lib/iris/cube.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Update lib/iris/cube.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Update lib/iris/cube.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Update lib/iris/cube.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Streamline docs. * Review changes. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Splitattrs ncload (#5384) * Distinguish local+global attributes in netcdf loads. * Small test fixes. * Small doctest fix. * Fix attribute load-save tests for new behaviour, and old-behaviour equivalence. * Split attrs docs (#5418) * Clarification in CubeAttrsDict examples. * CubeAttrsDict fix docstring typo. * Raise awareness of split attributes in user guide. * What's New entry. * Changes to metadata documentation. * Splitattrs ncsave redo (#5410) * Add docs and future switch, no function yet. * Typing enables code completion for Cube.attributes. * Make roundtrip checking more precise + improve some tests accordingly (cf. https://github.com/SciTools/iris/pull/5403). * Rework all tests to use common setup + results-checking code. * Saver supports split-attributes saving (no tests yet). * Tiny docs fix. * Explain test routines better. * Fix init of FUTURE object. 
* Remove spurious re-test of FUTURE.save_split_attrs. * Don't create Cube attrs of 'None' (n.b. but no effect as currently used). * Remove/repair refs to obsolete routines. * Check all warnings from save operations. * Remove TestSave test numbers. * More save cases: no match with missing, and different cube attribute types. * Run save/roundtrip tests both with+without split saves. * Fix. * Review changes. * Fix changed warning messages. * Move warnings checking from 'run' to 'check' phase. * Simplify and improve warnings checking code. * Fix wrong testcase. * Minor review changes. * Fix reverted code. * Use sets to simplify demoted-attributes code. * WIP * Working with iris 3.6.1, no errors TestSave or TestRoundtrip. * Interim save (incomplete?). * Different results form for split tests; working for roundtrip. * Check that all param lists are sorted. * Check matrix result-files compatibility; add test_save_matrix. * test_load_matrix added; two types of load result. * Finalise special-case attributes. * Small docs tweaks. * Add some more testcases, * Ensure valid sort-order for globals of possibly different types. * Initialise matrix results with legacy values from v3.6.1 -- all matching. * Add full current matrix results, i.e. snapshot current behaviours. * Review changes : rename some matrix testcases, for clarity. * Splitattrs ncsave redo commonmeta (#5538) * Define common-metadata operartions on split attribute dictionaries. * Tests for split-attributes handling in CubeMetadata operations. * Small tidy and clarify. * Common metadata ops support mixed split/unsplit attribute dicts. * Clarify with better naming, comments, docstrings. * Remove split-attrs handling to own sourcefile, and implement as a decorator. * Remove redundant tests duplicated by matrix testcases. * Newstyle split-attrs matrix testing, with fewer testcases. * Small improvements to comments + docstrings. * Fix logic for equals expectation; expand primary/secondary independence test. 
* Clarify result testing in metadata operations decorator. * Splitattrs equalise (#5586) * Add tests in advance for split-attributes handling cases. * Move dict conversion inside utility, for use elsewhere. * Add support for split-attributes to equalise_attributes. * Update lib/iris/util.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Update lib/iris/tests/unit/util/test_equalise_attributes.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Simplify and clarify equalise_attributes code. --------- Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Fix merge-fail messaging for attribute mismatches. (#5590) * Extra CubeAttrsDict methods to emulate dictionary behaviours. (#5592) * Extra CubeAttrsDict methods to emulate dictionary behaviours. * Don't use staticmethod on fixture. * Add Iris warning categories to saver warnings. * Type equality fixes for new flake8. * Licence header fixes. * Splitattrs ncsave deprecation (#5595) * Small improvement to split-attrs whatsnew. * Emit deprecation warning when saving without split-attrs enabled. * Stop legacy-split-attribute warnings from upsetting delayed-saving tests. 
--------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- docs/src/further_topics/metadata.rst | 41 +- docs/src/userguide/iris_cubes.rst | 5 +- docs/src/whatsnew/latest.rst | 14 +- lib/iris/__init__.py | 17 +- lib/iris/_merge.py | 23 +- lib/iris/common/_split_attribute_dicts.py | 125 ++ lib/iris/common/metadata.py | 47 +- lib/iris/common/mixin.py | 33 +- lib/iris/cube.py | 380 +++- .../fileformats/_nc_load_rules/helpers.py | 4 +- lib/iris/fileformats/netcdf/loader.py | 7 +- lib/iris/fileformats/netcdf/saver.py | 224 ++- .../attrs_matrix_results_load.json | 1019 ++++++++++ .../attrs_matrix_results_roundtrip.json | 983 ++++++++++ .../attrs_matrix_results_save.json | 983 ++++++++++ .../integration/netcdf/test_delayed_save.py | 7 + .../integration/test_netcdf__loadsaveattrs.py | 1678 +++++++++++++++++ lib/iris/tests/test_merge.py | 82 + .../unit/common/metadata/test_CubeMetadata.py | 1193 +++++++----- .../common/mixin/test_LimitedAttributeDict.py | 2 +- lib/iris/tests/unit/cube/test_Cube.py | 28 +- .../tests/unit/cube/test_CubeAttrsDict.py | 407 ++++ .../helpers/test_build_cube_metadata.py | 9 +- .../unit/util/test_equalise_attributes.py | 113 +- lib/iris/util.py | 45 +- 25 files changed, 6945 insertions(+), 524 deletions(-) create mode 100644 lib/iris/common/_split_attribute_dicts.py create mode 100644 lib/iris/tests/integration/attrs_matrix_results_load.json create mode 100644 lib/iris/tests/integration/attrs_matrix_results_roundtrip.json create mode 100644 lib/iris/tests/integration/attrs_matrix_results_save.json create mode 100644 lib/iris/tests/integration/test_netcdf__loadsaveattrs.py create mode 100644 lib/iris/tests/unit/cube/test_CubeAttrsDict.py diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index a564b2ba68..10efcdf7fe 100644 --- a/docs/src/further_topics/metadata.rst +++ 
b/docs/src/further_topics/metadata.rst @@ -91,6 +91,16 @@ actual `data attribute`_ names of the metadata members on the Iris class. metadata members are Iris specific terms, rather than recognised `CF Conventions`_ terms. +.. note:: + + :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` implement the + concept of dataset-level and variable-level attributes, to enable correct + NetCDF loading and saving (see :class:`~iris.cube.CubeAttrsDict` and NetCDF + :func:`~iris.fileformats.netcdf.saver.save` for more). ``attributes`` on + the other classes do not have this distinction, but the ``attributes`` + members of ALL the classes still have the same interface, and can be + compared. + Common Metadata API =================== @@ -128,10 +138,12 @@ For example, given the following :class:`~iris.cube.Cube`, source 'Data from Met Office Unified Model 6.05' We can easily get all of the associated metadata of the :class:`~iris.cube.Cube` -using the ``metadata`` property: +using the ``metadata`` property (note the specialised +:class:`~iris.cube.CubeAttrsDict` for the :attr:`~iris.cube.Cube.attributes`, +as mentioned earlier): >>> cube.metadata - CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes=CubeAttrsDict(globals={'Conventions': 'CF-1.5'}, locals={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}), cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) We can also inspect the ``metadata`` of the ``longitude`` 
:class:`~iris.coords.DimCoord` attached to the :class:`~iris.cube.Cube` in the same way: @@ -675,8 +687,8 @@ For example, consider the following :class:`~iris.common.metadata.CubeMetadata`, .. doctest:: metadata-combine - >>> cube.metadata # doctest: +SKIP - CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + >>> cube.metadata + CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes=CubeAttrsDict(globals={'Conventions': 'CF-1.5'}, locals={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}), cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) We can perform the **identity function** by comparing the metadata with itself, @@ -701,7 +713,7 @@ which is replaced with a **different value**, >>> metadata != cube.metadata True >>> metadata.combine(cube.metadata) # doctest: +SKIP - CubeMetadata(standard_name=None, long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'STASH': STASH(model=1, section=3, item=236), 'source': 'Data from Met Office Unified Model 6.05', 'Model scenario': 'A1B', 'Conventions': 'CF-1.5'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + CubeMetadata(standard_name=None, long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05', 'Conventions': 'CF-1.5'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), 
comments=()),)) The ``combine`` method combines metadata by performing a **strict** comparison between each of the associated metadata member values, @@ -724,7 +736,7 @@ Let's reinforce this behaviour, but this time by combining metadata where the >>> metadata != cube.metadata True >>> metadata.combine(cube.metadata).attributes - {'Model scenario': 'A1B'} + CubeAttrsDict(globals={}, locals={'Model scenario': 'A1B'}) The combined result for the ``attributes`` member only contains those **common keys** with **common values**. @@ -810,16 +822,17 @@ the ``from_metadata`` class method. For example, given the following .. doctest:: metadata-convert - >>> cube.metadata # doctest: +SKIP - CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + >>> cube.metadata + CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes=CubeAttrsDict(globals={'Conventions': 'CF-1.5'}, locals={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}), cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) We can easily convert it to a :class:`~iris.common.metadata.DimCoordMetadata` instance using ``from_metadata``, .. 
doctest:: metadata-convert - >>> DimCoordMetadata.from_metadata(cube.metadata) # doctest: +SKIP - DimCoordMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, coord_system=None, climatological=None, circular=None) + >>> newmeta = DimCoordMetadata.from_metadata(cube.metadata) + >>> print(newmeta) + DimCoordMetadata(standard_name=air_temperature, var_name=air_temperature, units=K, attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}) By examining :numref:`metadata members table`, we can see that the :class:`~iris.cube.Cube` and :class:`~iris.coords.DimCoord` container @@ -849,9 +862,9 @@ class instance, .. doctest:: metadata-convert - >>> longitude.metadata.from_metadata(cube.metadata) - DimCoordMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, coord_system=None, climatological=None, circular=None) - + >>> newmeta = longitude.metadata.from_metadata(cube.metadata) + >>> print(newmeta) + DimCoordMetadata(standard_name=air_temperature, var_name=air_temperature, units=K, attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}) .. 
_metadata assignment: @@ -978,7 +991,7 @@ Indeed, it's also possible to assign to the ``metadata`` property with a >>> longitude.metadata DimCoordMetadata(standard_name='longitude', long_name=None, var_name='longitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) >>> longitude.metadata = cube.metadata - >>> longitude.metadata # doctest: +SKIP + >>> longitude.metadata DimCoordMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) Note that, only **common** metadata members will be assigned new associated diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index 267f97b0fc..03b5093efc 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -85,7 +85,10 @@ A cube consists of: data dimensions as the coordinate has dimensions. * an attributes dictionary which, other than some protected CF names, can - hold arbitrary extra metadata. + hold arbitrary extra metadata. This implements the concept of dataset-level + and variable-level attributes when loading and saving NetCDF files (see + :class:`~iris.cube.CubeAttrsDict` and NetCDF + :func:`~iris.fileformats.netcdf.saver.save` for more). * a list of cell methods to represent operations which have already been applied to the data (e.g. "mean over time") * a list of coordinate "factories" used for deriving coordinates from the diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index bad64fccc4..93919216c7 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -29,6 +29,16 @@ This document explains the changes made to Iris for this release ✨ Features =========== +#. 
`@pp-mo`_, `@lbdreyer`_ and `@trexfeathers`_ improved + :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` handling to + better preserve the distinction between dataset-level and variable-level + attributes, allowing file-Cube-file round-tripping of NetCDF attributes. See + :class:`~iris.cube.CubeAttrsDict`, NetCDF + :func:`~iris.fileformats.netcdf.saver.save` and :data:`~iris.Future` for more. + (:pull:`5152`, `split attributes project`_) + +#. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles + lazy data. (:pull:`5307`) #. `@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' :class:`UserWarning`\s for richer filtering. The full index of @@ -45,7 +55,7 @@ This document explains the changes made to Iris for this release the year of December) instead of the following year (the default behaviour). (:pull:`5573`) - #. `@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual +#. `@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a coordinate. (:pull:`5551`) @@ -152,4 +162,4 @@ This document explains the changes made to Iris for this release .. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule .. _codespell: https://github.com/codespell-project/codespell - +.. _split attributes project: https://github.com/orgs/SciTools/projects/5?pane=info diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index c29998cd6d..a10169b7bb 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -141,7 +141,9 @@ def callback(cube, field, filename): class Future(threading.local): """Run-time configuration controller.""" - def __init__(self, datum_support=False, pandas_ndim=False): + def __init__( + self, datum_support=False, pandas_ndim=False, save_split_attrs=False + ): """ A container for run-time options controls. 
@@ -163,6 +165,11 @@ def __init__(self, datum_support=False, pandas_ndim=False): pandas_ndim : bool, default=False See :func:`iris.pandas.as_data_frame` for details - opts in to the newer n-dimensional behaviour. + save_split_attrs : bool, default=False + Save "global" and "local" cube attributes to netcdf in appropriately + different ways : "global" ones are saved as dataset attributes, where + possible, while "local" ones are saved as data-variable attributes. + See :func:`iris.fileformats.netcdf.saver.save`. """ # The flag 'example_future_flag' is provided as a reference for the @@ -174,14 +181,18 @@ def __init__(self, datum_support=False, pandas_ndim=False): # self.__dict__['example_future_flag'] = example_future_flag self.__dict__["datum_support"] = datum_support self.__dict__["pandas_ndim"] = pandas_ndim + self.__dict__["save_split_attrs"] = save_split_attrs + # TODO: next major release: set IrisDeprecation to subclass # DeprecationWarning instead of UserWarning. def __repr__(self): # msg = ('Future(example_future_flag={})') # return msg.format(self.example_future_flag) - msg = "Future(datum_support={}, pandas_ndim={})" - return msg.format(self.datum_support, self.pandas_ndim) + msg = "Future(datum_support={}, pandas_ndim={}, save_split_attrs={})" + return msg.format( + self.datum_support, self.pandas_ndim, self.save_split_attrs + ) # deprecated_options = {'example_future_flag': 'warning',} deprecated_options = {} diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index bf22f57887..a8f079e70e 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -22,6 +22,9 @@ multidim_lazy_stack, ) from iris.common import CoordMetadata, CubeMetadata +from iris.common._split_attribute_dicts import ( + _convert_splitattrs_to_pairedkeys_dict as convert_splitattrs_to_pairedkeys_dict, +) import iris.coords import iris.cube import iris.exceptions @@ -390,8 +393,10 @@ def _defn_msgs(self, other_defn): ) ) if self_defn.attributes != other_defn.attributes: - diff_keys = 
set(self_defn.attributes.keys()) ^ set( - other_defn.attributes.keys() + attrs_1, attrs_2 = self_defn.attributes, other_defn.attributes + diff_keys = sorted( + set(attrs_1.globals) ^ set(attrs_2.globals) + | set(attrs_1.locals) ^ set(attrs_2.locals) ) if diff_keys: msgs.append( @@ -399,14 +404,16 @@ def _defn_msgs(self, other_defn): + ", ".join(repr(key) for key in diff_keys) ) else: + attrs_1, attrs_2 = [ + convert_splitattrs_to_pairedkeys_dict(dic) + for dic in (attrs_1, attrs_2) + ] diff_attrs = [ - repr(key) - for key in self_defn.attributes - if np.all( - self_defn.attributes[key] != other_defn.attributes[key] - ) + repr(key[1]) + for key in attrs_1 + if np.all(attrs_1[key] != attrs_2[key]) ] - diff_attrs = ", ".join(diff_attrs) + diff_attrs = ", ".join(sorted(diff_attrs)) msgs.append( "cube.attributes values differ for keys: {}".format( diff_attrs diff --git a/lib/iris/common/_split_attribute_dicts.py b/lib/iris/common/_split_attribute_dicts.py new file mode 100644 index 0000000000..3927974053 --- /dev/null +++ b/lib/iris/common/_split_attribute_dicts.py @@ -0,0 +1,125 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +""" +Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute +dictionaries. + +The idea here is to convert a split-dictionary into a "plain" one for calculations, +whose keys are all pairs of the form ('global', ) or ('local', ). +And to convert back again after the operation, if the result is a dictionary. + +For "strict" operations this clearly does all that is needed. For lenient ones, +we _might_ want for local+global attributes of the same name to interact. +However, on careful consideration, it seems that this is not actually desirable for +any of the common-metadata operations. +So, we simply treat "global" and "local" attributes of the same name as entirely +independent. 
Which happily is also the easiest to code, and to explain. +""" +from collections.abc import Mapping, Sequence +from functools import wraps + + +def _convert_splitattrs_to_pairedkeys_dict(dic): + """ + Convert a split-attributes dictionary to a "normal" dict. + + Transform a :class:`~iris.cube.CubeAttributesDict` "split" attributes dictionary + into a 'normal' :class:`dict`, with paired keys of the form ('global', name) or + ('local', name). + + If the input is *not* a split-attrs dict, it is converted to one before + transforming it. This will assign its keys to global/local depending on a standard + set of choices (see :class:`~iris.cube.CubeAttributesDict`). + """ + from iris.cube import CubeAttrsDict + + # Convert input to CubeAttrsDict + if not hasattr(dic, "globals") or not hasattr(dic, "locals"): + dic = CubeAttrsDict(dic) + + def _global_then_local_items(dic): + # Routine to produce global, then local 'items' in order, and with all keys + # "labelled" as local or global type, to ensure they are all unique. + for key, value in dic.globals.items(): + yield ("global", key), value + for key, value in dic.locals.items(): + yield ("local", key), value + + return dict(_global_then_local_items(dic)) + + +def _convert_pairedkeys_dict_to_splitattrs(dic): + """ + Convert an input with global/local paired keys back into a split-attrs dict. + + For now, this is always and only a :class:`iris.cube.CubeAttrsDict`. + """ + from iris.cube import CubeAttrsDict + + result = CubeAttrsDict() + for key, value in dic.items(): + keytype, keyname = key + if keytype == "global": + result.globals[keyname] = value + else: + assert keytype == "local" + result.locals[keyname] = value + return result + + +def adjust_for_split_attribute_dictionaries(operation): + """ + Decorator to make a function of attribute-dictionaries work with split attributes. 
+ + The wrapped function of attribute-dictionaries is currently always one of "equals", + "combine" or "difference", with signatures like : + equals(left: dict, right: dict) -> bool + combine(left: dict, right: dict) -> dict + difference(left: dict, right: dict) -> None | (dict, dict) + + The results of the wrapped operation are either : + * for "equals" (or "__eq__") : a boolean + * for "combine" : a (converted) attributes-dictionary + * for "difference" : a list of (None or "pair"), where a pair contains two + dictionaries + + Before calling the wrapped operation, its inputs (left, right) are modified by + converting any "split" dictionaries to a form where the keys are pairs + of the form ("global", name) or ("local", name). + + After calling the wrapped operation, for "combine" or "difference", the result can + contain a dictionary or dictionaries. These are then transformed back from the + 'converted' form to split-attribute dictionaries, before returning. + + "Split" dictionaries are all of class :class:`~iris.cube.CubeAttrsDict`, since + the only usage of 'split' attribute dictionaries is in Cubes (i.e. they are not + used for cube components). + """ + + @wraps(operation) + def _inner_function(*args, **kwargs): + # Convert all inputs into 'pairedkeys' type dicts + args = [_convert_splitattrs_to_pairedkeys_dict(arg) for arg in args] + + result = operation(*args, **kwargs) + + # Convert known specific cases of 'pairedkeys' dicts in the result, and convert + # those back into split-attribute dictionaries. 
+ if isinstance(result, Mapping): + # Fix a result which is a single dictionary -- for "combine" + result = _convert_pairedkeys_dict_to_splitattrs(result) + elif isinstance(result, Sequence) and len(result) == 2: + # Fix a result which is a pair of dictionaries -- for "difference" + left, right = result + left, right = ( + _convert_pairedkeys_dict_to_splitattrs(left), + _convert_pairedkeys_dict_to_splitattrs(right), + ) + result = result.__class__([left, right]) + # ELSE: leave other types of result unchanged. E.G. None, bool + + return result + + return _inner_function diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 8d60171331..f88a2e57b5 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -20,6 +20,7 @@ from xxhash import xxh64_hexdigest from ..config import get_logger +from ._split_attribute_dicts import adjust_for_split_attribute_dictionaries from .lenient import _LENIENT from .lenient import _lenient_service as lenient_service from .lenient import _qualname as qualname @@ -241,7 +242,11 @@ def __str__(self): field_strings = [] for field in self._fields: value = getattr(self, field) - if value is None or isinstance(value, (str, dict)) and not value: + if ( + value is None + or isinstance(value, (str, Mapping)) + and not value + ): continue field_strings.append(f"{field}={value}") @@ -1250,6 +1255,46 @@ def _check(item): return result + # + # Override each of the attribute-dict operations in BaseMetadata, to enable + # them to deal with split-attribute dictionaries correctly. + # There are 6 of these, for (equals/combine/difference) * (lenient/strict). + # Each is overridden with a *wrapped* version of the parent method, using the + # "@adjust_for_split_attribute_dictionaries" decorator, which converts any + # split-attribute dictionaries in the inputs to ordinary dicts, and likewise + # re-converts any dictionaries in the return value. 
+ # + + @staticmethod + @adjust_for_split_attribute_dictionaries + def _combine_lenient_attributes(left, right): + return BaseMetadata._combine_lenient_attributes(left, right) + + @staticmethod + @adjust_for_split_attribute_dictionaries + def _combine_strict_attributes(left, right): + return BaseMetadata._combine_strict_attributes(left, right) + + @staticmethod + @adjust_for_split_attribute_dictionaries + def _compare_lenient_attributes(left, right): + return BaseMetadata._compare_lenient_attributes(left, right) + + @staticmethod + @adjust_for_split_attribute_dictionaries + def _compare_strict_attributes(left, right): + return BaseMetadata._compare_strict_attributes(left, right) + + @staticmethod + @adjust_for_split_attribute_dictionaries + def _difference_lenient_attributes(left, right): + return BaseMetadata._difference_lenient_attributes(left, right) + + @staticmethod + @adjust_for_split_attribute_dictionaries + def _difference_strict_attributes(left, right): + return BaseMetadata._difference_strict_attributes(left, right) + class DimCoordMetadata(CoordMetadata): """ diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index f3b42fc02d..a1b1e4647b 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -16,7 +16,7 @@ from .metadata import BaseMetadata -__all__ = ["CFVariableMixin"] +__all__ = ["CFVariableMixin", "LimitedAttributeDict"] def _get_valid_standard_name(name): @@ -52,7 +52,29 @@ def _get_valid_standard_name(name): class LimitedAttributeDict(dict): - _forbidden_keys = ( + """ + A specialised 'dict' subclass, which forbids (errors) certain attribute names. + + Used for the attribute dictionaries of all Iris data objects (that is, + :class:`CFVariableMixin` and its subclasses). + + The "excluded" attributes are those which either :mod:`netCDF4` or Iris interpret and + control with special meaning, which therefore should *not* be defined as custom + 'user' attributes on Iris data objects such as cubes. 
+ + For example : "coordinates", "grid_mapping", "scale_factor". + + The 'forbidden' attributes are those listed in + :data:`iris.common.mixin.LimitedAttributeDict.CF_ATTRS_FORBIDDEN` . + + All the forbidden attributes are amongst those listed in + `Appendix A of the CF Conventions: `_ + -- however, not *all* of them, since not all are interpreted by Iris. + + """ + + #: Attributes with special CF meaning, forbidden in Iris attribute dictionaries. + CF_ATTRS_FORBIDDEN = ( "standard_name", "long_name", "units", @@ -77,7 +99,7 @@ def __init__(self, *args, **kwargs): dict.__init__(self, *args, **kwargs) # Check validity of keys for key in self.keys(): - if key in self._forbidden_keys: + if key in self.CF_ATTRS_FORBIDDEN: raise ValueError(f"{key!r} is not a permitted attribute") def __eq__(self, other): @@ -98,11 +120,12 @@ def __ne__(self, other): return not self == other def __setitem__(self, key, value): - if key in self._forbidden_keys: + if key in self.CF_ATTRS_FORBIDDEN: raise ValueError(f"{key!r} is not a permitted attribute") dict.__setitem__(self, key, value) def update(self, other, **kwargs): + """Standard ``dict.update()`` operation.""" # Gather incoming keys keys = [] if hasattr(other, "keys"): @@ -114,7 +137,7 @@ def update(self, other, **kwargs): # Check validity of keys for key in keys: - if key in self._forbidden_keys: + if key in self.CF_ATTRS_FORBIDDEN: raise ValueError(f"{key!r} is not a permitted attribute") dict.update(self, other, **kwargs) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 3a36a035c0..8aa0b452d5 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -9,11 +9,20 @@ """ from collections import OrderedDict -from collections.abc import Container, Iterable, Iterator, MutableMapping import copy from copy import deepcopy from functools import partial, reduce +import itertools import operator +from typing import ( + Container, + Iterable, + Iterator, + Mapping, + MutableMapping, + Optional, + Union, +) import warnings from 
xml.dom.minidom import Document import zlib @@ -34,12 +43,13 @@ import iris.aux_factory from iris.common import CFVariableMixin, CubeMetadata, metadata_manager_factory from iris.common.metadata import metadata_filter +from iris.common.mixin import LimitedAttributeDict import iris.coord_systems import iris.coords import iris.exceptions import iris.util -__all__ = ["Cube", "CubeList"] +__all__ = ["Cube", "CubeAttrsDict", "CubeList"] # The XML namespace to use for CubeML documents @@ -789,6 +799,352 @@ def _is_single_item(testee): return isinstance(testee, str) or not isinstance(testee, Iterable) +class CubeAttrsDict(MutableMapping): + """ + A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, + providing unified user access to combined cube "local" and "global" attributes + dictionaries, with the access behaviour of an ordinary (single) dictionary. + + Properties :attr:`globals` and :attr:`locals` are regular + :class:`~iris.common.mixin.LimitedAttributeDict`\\s, which can be accessed and + modified separately. The :class:`CubeAttrsDict` itself contains *no* additional + state, but simply provides a 'combined' view of both global + local attributes. + + All the read- and write-type methods, such as ``get()``, ``update()``, ``values()``, + behave according to the logic documented for : :meth:`__getitem__`, + :meth:`__setitem__` and :meth:`__iter__`. + + Notes + ----- + For type testing, ``issubclass(CubeAttrsDict, Mapping)`` is ``True``, but + ``issubclass(CubeAttrsDict, dict)`` is ``False``. + + Examples + -------- + + >>> from iris.cube import Cube + >>> cube = Cube([0]) + >>> # CF defines 'history' as global by default. 
+ >>> cube.attributes.update({"history": "from test-123", "mycode": 3}) + >>> print(cube.attributes) + {'history': 'from test-123', 'mycode': 3} + >>> print(repr(cube.attributes)) + CubeAttrsDict(globals={'history': 'from test-123'}, locals={'mycode': 3}) + + >>> cube.attributes['history'] += ' +added' + >>> print(repr(cube.attributes)) + CubeAttrsDict(globals={'history': 'from test-123 +added'}, locals={'mycode': 3}) + + >>> cube.attributes.locals['history'] = 'per-variable' + >>> print(cube.attributes) + {'history': 'per-variable', 'mycode': 3} + >>> print(repr(cube.attributes)) + CubeAttrsDict(globals={'history': 'from test-123 +added'}, locals={'mycode': 3, 'history': 'per-variable'}) + + """ + + # TODO: Create a 'further topic' / 'tech paper' on NetCDF I/O, including + # discussion of attribute handling. + + def __init__( + self, + combined: Optional[Union[Mapping, str]] = "__unspecified", + locals: Optional[Mapping] = None, + globals: Optional[Mapping] = None, + ): + """ + Create a cube attributes dictionary. + + We support initialisation from a single generic mapping input, using the default + global/local assignment rules explained at :meth:`__setattr__`, or from + two separate mappings. Two separate dicts can be passed in the ``locals`` + and ``globals`` args, **or** via a ``combined`` arg which has its own + ``.globals`` and ``.locals`` properties -- so this allows passing an existing + :class:`CubeAttrsDict`, which will be copied. + + Parameters + ---------- + combined : dict + values to init both 'self.globals' and 'self.locals'. If 'combined' itself + has attributes named 'locals' and 'globals', these are used to update the + respective content (after initially setting the individual ones). + Otherwise, 'combined' is treated as a generic mapping, applied as + ``self.update(combined)``, + i.e. it will set locals and/or globals with the same logic as + :meth:`~iris.cube.CubeAttrsDict.__setitem__` . 
+ locals : dict + initial content for 'self.locals' + globals : dict + initial content for 'self.globals' + + Examples + -------- + + >>> from iris.cube import CubeAttrsDict + >>> # CF defines 'history' as global by default. + >>> CubeAttrsDict({'history': 'data-story', 'comment': 'this-cube'}) + CubeAttrsDict(globals={'history': 'data-story'}, locals={'comment': 'this-cube'}) + + >>> CubeAttrsDict(locals={'history': 'local-history'}) + CubeAttrsDict(globals={}, locals={'history': 'local-history'}) + + >>> CubeAttrsDict(globals={'x': 'global'}, locals={'x': 'local'}) + CubeAttrsDict(globals={'x': 'global'}, locals={'x': 'local'}) + + >>> x1 = CubeAttrsDict(globals={'x': 1}, locals={'y': 2}) + >>> x2 = CubeAttrsDict(x1) + >>> x2 + CubeAttrsDict(globals={'x': 1}, locals={'y': 2}) + + """ + # First initialise locals + globals, defaulting to empty. + self.locals = locals + self.globals = globals + # Update with combined, if present. + if not isinstance(combined, str) or combined != "__unspecified": + # Treat a single input with 'locals' and 'globals' properties as an + # existing CubeAttrsDict, and update from its content. + # N.B. enforce deep copying, consistent with general Iris usage. + if hasattr(combined, "globals") and hasattr(combined, "locals"): + # Copy a mapping with globals/locals, like another 'CubeAttrsDict' + self.globals.update(deepcopy(combined.globals)) + self.locals.update(deepcopy(combined.locals)) + else: + # Treat any arbitrary single input value as a mapping (dict), and + # update from it. + self.update(dict(deepcopy(combined))) + + # + # Ensure that the stored local/global dictionaries are "LimitedAttributeDicts". + # + @staticmethod + def _normalise_attrs( + attributes: Optional[Mapping], + ) -> LimitedAttributeDict: + # Convert an input attributes arg into a standard form. + # N.B. content is always a LimitedAttributeDict, and a deep copy of input. + # Allow arg of None, etc. 
+ if not attributes: + attributes = {} + else: + attributes = deepcopy(attributes) + + # Ensure the expected mapping type. + attributes = LimitedAttributeDict(attributes) + return attributes + + @property + def locals(self) -> LimitedAttributeDict: + return self._locals + + @locals.setter + def locals(self, attributes: Optional[Mapping]): + self._locals = self._normalise_attrs(attributes) + + @property + def globals(self) -> LimitedAttributeDict: + return self._globals + + @globals.setter + def globals(self, attributes: Optional[Mapping]): + self._globals = self._normalise_attrs(attributes) + + # + # Provide a serialisation interface + # + def __getstate__(self): + return (self.locals, self.globals) + + def __setstate__(self, state): + self.locals, self.globals = state + + # + # Support comparison -- required because default operation only compares a single + # value at each key. + # + def __eq__(self, other): + # For equality, require both globals + locals to match exactly. + # NOTE: array content works correctly, since 'locals' and 'globals' are always + # iris.common.mixin.LimitedAttributeDict, which gets this right. + other = CubeAttrsDict(other) + result = self.locals == other.locals and self.globals == other.globals + return result + + # + # Provide methods duplicating those for a 'dict', but which are *not* provided by + # MutableMapping, for compatibility with code which expected a cube.attributes to be + # a :class:`~iris.common.mixin.LimitedAttributeDict`. + # The extra required methods are : + # 'copy', 'update', '__ior__', '__or__', '__ror__' and 'fromkeys'. + # + def copy(self): + """ + Return a copy. + + Implemented with deep copying, consistent with general Iris usage. + + """ + return CubeAttrsDict(self) + + def update(self, *args, **kwargs): + """ + Update by adding items from a mapping arg, or keyword-values. + + If the argument is a split dictionary, preserve the local/global nature of its + keys. 
+ """ + if args and hasattr(args[0], "globals") and hasattr(args[0], "locals"): + dic = args[0] + self.globals.update(dic.globals) + self.locals.update(dic.locals) + else: + super().update(*args) + super().update(**kwargs) + + def __or__(self, arg): + """Implement 'or' via 'update'.""" + if not isinstance(arg, Mapping): + return NotImplemented + new_dict = self.copy() + new_dict.update(arg) + return new_dict + + def __ior__(self, arg): + """Implement 'ior' via 'update'.""" + self.update(arg) + return self + + def __ror__(self, arg): + """ + Implement 'ror' via 'update'. + + This needs to promote, such that the result is a CubeAttrsDict. + """ + if not isinstance(arg, Mapping): + return NotImplemented + result = CubeAttrsDict(arg) + result.update(self) + return result + + @classmethod + def fromkeys(cls, iterable, value=None): + """ + Create a new object with keys taken from an argument, all set to one value. + + If the argument is a split dictionary, preserve the local/global nature of its + keys. + """ + if hasattr(iterable, "globals") and hasattr(iterable, "locals"): + # When main input is a split-attrs dict, create global/local parts from its + # global/local keys + result = cls( + globals=dict.fromkeys(iterable.globals, value), + locals=dict.fromkeys(iterable.locals, value), + ) + else: + # Create from a dict.fromkeys, using default classification of the keys. + result = cls(dict.fromkeys(iterable, value)) + return result + + # + # The remaining methods are sufficient to generate a complete standard Mapping + # API. See - + # https://docs.python.org/3/reference/datamodel.html#emulating-container-types. + # + + def __iter__(self): + """ + Define the combined iteration order. + + Result is: all global keys, then all local ones, but omitting duplicates. + + """ + # NOTE: this means that in the "summary" view, attributes present in both + # locals+globals are listed first, amongst the globals, even though they appear + # with the *value* from locals. 
+ # Otherwise follows order of insertion, as is normal for dicts. + return itertools.chain( + self.globals.keys(), + (x for x in self.locals.keys() if x not in self.globals), + ) + + def __len__(self): + # Return the number of keys in the 'combined' view. + return len(list(iter(self))) + + def __getitem__(self, key): + """ + Fetch an item from the "combined attributes". + + If the name is present in *both* ``self.locals`` and ``self.globals``, then + the local value is returned. + + """ + if key in self.locals: + store = self.locals + else: + store = self.globals + return store[key] + + def __setitem__(self, key, value): + """ + Assign an attribute value. + + This may be assigned in either ``self.locals`` or ``self.globals``, chosen as + follows: + + * If there is an existing setting in either ``.locals`` or ``.globals``, then + that is updated (i.e. overwritten). + + * If it is present in *both*, only + ``.locals`` is updated. + + * If there is *no* existing attribute, it is usually created in ``.locals``. + **However** a handful of "known normally global" cases, as defined by CF, + go into ``.globals`` instead. + At present these are : ('conventions', 'featureType', 'history', 'title'). + See `CF Conventions, Appendix A: `_ . + + """ + # If an attribute of this name is already present, update that + # (the local one having priority). + if key in self.locals: + store = self.locals + elif key in self.globals: + store = self.globals + else: + # If NO existing attribute, create local unless it is a "known global" one. + from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS + + if key in _CF_GLOBAL_ATTRS: + store = self.globals + else: + store = self.locals + + store[key] = value + + def __delitem__(self, key): + """ + Remove an attribute. + + Delete from both local + global. + + """ + if key in self.locals: + del self.locals[key] + if key in self.globals: + del self.globals[key] + + def __str__(self): + # Print it just like a "normal" dictionary. 
+ # Convert to a normal dict to do that. + return str(dict(self)) + + def __repr__(self): + # Special repr form, showing "real" contents. + return f"CubeAttrsDict(globals={self.globals}, locals={self.locals})" + + class Cube(CFVariableMixin): """ A single Iris cube of data and metadata. @@ -985,8 +1341,8 @@ def __init__( self.cell_methods = cell_methods - #: A dictionary, with a few restricted keys, for arbitrary - #: Cube metadata. + #: A dictionary for arbitrary Cube metadata. + #: A few keys are restricted - see :class:`CubeAttrsDict`. self.attributes = attributes # Coords @@ -1044,6 +1400,22 @@ def _names(self): """ return self._metadata_manager._names + # + # Ensure that .attributes is always a :class:`CubeAttrsDict`. + # + @property + def attributes(self) -> CubeAttrsDict: + return super().attributes + + @attributes.setter + def attributes(self, attributes: Optional[Mapping]): + """ + An override to CfVariableMixin.attributes.setter, which ensures that Cube + attributes are stored in a way which distinguishes global + local ones. + + """ + self._metadata_manager.attributes = CubeAttrsDict(attributes or {}) + def _dimensional_metadata(self, name_or_dimensional_metadata): """ Return a single _DimensionalMetadata instance that matches the given diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 71e59feda0..7044b3a993 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -482,9 +482,9 @@ def build_cube_metadata(engine): # Set the cube global attributes. 
for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): try: - cube.attributes[str(attr_name)] = attr_value + cube.attributes.globals[str(attr_name)] = attr_value except ValueError as e: - msg = "Skipping global attribute {!r}: {}" + msg = "Skipping disallowed global attribute {!r}: {}" warnings.warn( msg.format(attr_name, str(e)), category=_WarnComboIgnoringLoad, diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 030427a2b9..f0ed111687 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -167,8 +167,13 @@ def attribute_predicate(item): return item[0] not in _CF_ATTRS tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) + attrs_dict = iris_object.attributes + if hasattr(attrs_dict, "locals"): + # Treat cube attributes (i.e. a CubeAttrsDict) as a special case. + # These attrs are "local" (i.e. on the variable), so record them as such. + attrs_dict = attrs_dict.locals for attr_name, attr_value in tmpvar: - _set_attributes(iris_object.attributes, attr_name, attr_value) + _set_attributes(attrs_dict, attr_name, attr_value) def _get_actual_dtype(cf_var): diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 6409d8c311..fcbc9a5383 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -27,6 +27,7 @@ from dask.delayed import Delayed import numpy as np +from iris._deprecation import warn_deprecated from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data from iris.aux_factory import ( AtmosphereSigmaFactory, @@ -560,6 +561,10 @@ def write( matching keys will become attributes on the data variable rather than global attributes. + .. Note:: + + Has no effect if :attr:`iris.FUTURE.save_split_attrs` is ``True``. 
+
     * unlimited_dimensions (iterable of strings and/or
       :class:`iris.coords.Coord` objects):
         List of coordinate names (or coordinate objects)
@@ -652,6 +657,9 @@ def write(
             3 files that do not use HDF5.
 
         """
+        # TODO: when iris.FUTURE.save_split_attrs defaults to True, we can deprecate the
+        # "local_keys" arg, and finally remove it when we remove the
+        # save_split_attrs switch.
         if unlimited_dimensions is None:
             unlimited_dimensions = []
@@ -728,20 +736,23 @@ def write(
             # aux factory in the cube.
             self._add_aux_factories(cube, cf_var_cube, cube_dimensions)
 
-            # Add data variable-only attribute names to local_keys.
-            if local_keys is None:
-                local_keys = set()
-            else:
-                local_keys = set(local_keys)
-            local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS)
-
-            # Add global attributes taking into account local_keys.
-            global_attributes = {
-                k: v
-                for k, v in cube.attributes.items()
-                if (k not in local_keys and k.lower() != "conventions")
-            }
-            self.update_global_attributes(global_attributes)
+            if not iris.FUTURE.save_split_attrs:
+                # In the "old" way, we update global attributes as we go.
+                # Add data variable-only attribute names to local_keys.
+                if local_keys is None:
+                    local_keys = set()
+                else:
+                    local_keys = set(local_keys)
+                local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS)
+
+                # Add global attributes taking into account local_keys.
+                cube_attributes = cube.attributes
+                global_attributes = {
+                    k: v
+                    for k, v in cube_attributes.items()
+                    if (k not in local_keys and k.lower() != "conventions")
+                }
+                self.update_global_attributes(global_attributes)
 
         if cf_profile_available:
             cf_patch = iris.site_configuration.get("cf_patch")
@@ -797,6 +808,9 @@ def update_global_attributes(self, attributes=None, **kwargs):
             CF global attributes to be updated.
 
         """
+        # TODO: when iris.FUTURE.save_split_attrs is removed, this routine will
+        # only be called once: it can reasonably be renamed "_set_global_attributes",
+        # and the 'kwargs' argument can be removed.
if attributes is not None: # Handle sequence e.g. [('fruit', 'apple'), ...]. if not hasattr(attributes, "keys"): @@ -2266,6 +2280,8 @@ def _create_cf_data_variable( """ Create CF-netCDF data variable for the cube and any associated grid mapping. + # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can + # be removed. Args: @@ -2290,6 +2306,8 @@ def _create_cf_data_variable( The newly created CF-netCDF data variable. """ + # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can + # be removed. # Get the values in a form which is valid for the file format. data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) @@ -2378,16 +2396,20 @@ def set_packing_ncattrs(cfvar): if cube.units.calendar: _setncattr(cf_var, "calendar", cube.units.calendar) - # Add data variable-only attribute names to local_keys. - if local_keys is None: - local_keys = set() + if iris.FUTURE.save_split_attrs: + attr_names = cube.attributes.locals.keys() else: - local_keys = set(local_keys) - local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS) + # Add data variable-only attribute names to local_keys. + if local_keys is None: + local_keys = set() + else: + local_keys = set(local_keys) + local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS) + + # Add any cube attributes whose keys are in local_keys as + # CF-netCDF data variable attributes. + attr_names = set(cube.attributes).intersection(local_keys) - # Add any cube attributes whose keys are in local_keys as - # CF-netCDF data variable attributes. - attr_names = set(cube.attributes).intersection(local_keys) for attr_name in sorted(attr_names): # Do not output 'conventions' attribute. if attr_name.lower() == "conventions": @@ -2673,9 +2695,15 @@ def save( Save cube(s) to a netCDF file, given the cube and the filename. * Iris will write CF 1.7 compliant NetCDF files. 
- * The attributes dictionaries on each cube in the saved cube list - will be compared and common attributes saved as NetCDF global - attributes where appropriate. + * **If split-attribute saving is disabled**, i.e. + :data:`iris.FUTURE`\\ ``.save_split_attrs`` is ``False``, then attributes + dictionaries on each cube in the saved cube list will be compared, and common + attributes saved as NetCDF global attributes where appropriate. + + Or, **when split-attribute saving is enabled**, then ``cube.attributes.locals`` + are always saved as attributes of data-variables, and ``cube.attributes.globals`` + are saved as global (dataset) attributes, where possible. + Since the 2 types are now distinguished : see :class:`~iris.cube.CubeAttrsDict`. * Keyword arguments specifying how to save the data are applied to each cube. To use different settings for different cubes, use the NetCDF Context manager (:class:`~Saver`) directly. @@ -2708,6 +2736,8 @@ def save( An interable of cube attribute keys. Any cube attributes with matching keys will become attributes on the data variable rather than global attributes. + **NOTE:** this is *ignored* if 'split-attribute saving' is **enabled**, + i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``. * unlimited_dimensions (iterable of strings and/or :class:`iris.coords.Coord` objects): @@ -2846,26 +2876,127 @@ def save( else: cubes = cube - if local_keys is None: + # Decide which cube attributes will be saved as "global" attributes + # NOTE: in 'legacy' mode, when iris.FUTURE.save_split_attrs == False, this code + # section derives a common value for 'local_keys', which is passed to 'Saver.write' + # when saving each input cube. The global attributes are then created by a call + # to "Saver.update_global_attributes" within each 'Saver.write' call (which is + # obviously a bit redundant!), plus an extra one to add 'Conventions'. 
+    # HOWEVER, in `split_attrs` mode (iris.FUTURE.save_split_attrs == True), this code
+    # instead constructs a 'global_attributes' dictionary, and outputs that just once,
+    # after writing all the input cubes.
+    if iris.FUTURE.save_split_attrs:
+        # We don't actually use 'local_keys' in this case.
+        # TODO: can remove this when the iris.FUTURE.save_split_attrs is removed.
         local_keys = set()
+
+        # Find any collisions in the cube global attributes and "demote" all those to
+        # local attributes (where possible, else warn they are lost).
+        # N.B. "collision" includes when not all cubes *have* that attribute.
+        global_names = set()
+        for cube in cubes:
+            global_names |= set(cube.attributes.globals.keys())
+
+        # Find any global attributes which are not the same on *all* cubes.
+        def attr_values_equal(val1, val2):
+            # An equality test which also works when some values are numpy arrays (!)
+            # As done in :meth:`iris.common.mixin.LimitedAttributeDict.__eq__`.
+            match = val1 == val2
+            try:
+                match = bool(match)
+            except ValueError:
+                match = match.all()
+            return match
+
+        cube0 = cubes[0]
+        invalid_globals = set(
+            [
+                attrname
+                for attrname in global_names
+                if not all(
+                    attr_values_equal(
+                        cube.attributes.globals.get(attrname),
+                        cube0.attributes.globals.get(attrname),
+                    )
+                    for cube in cubes[1:]
+                )
+            ]
+        )
+
+        # Establish all the global attributes which we will write to the file (at end).
+        global_attributes = {
+            attr: cube0.attributes.globals.get(attr)
+            for attr in global_names - invalid_globals
+        }
+        if invalid_globals:
+            # Some cubes have different global attributes: modify cubes as required.
+            warnings.warn(
+                f"Saving the cube global attributes {sorted(invalid_globals)} as local "
+                "(i.e. data-variable) attributes, where possible, since they are not "
+                "the same on all input cubes.",
+                category=iris.exceptions.IrisSaveWarning,
+            )
+            cubes = cubes.copy()  # avoiding modifying the actual input arg.
+            for i_cube in range(len(cubes)):
+                # We iterate over cube *index*, so we can replace the list entries
+                # with cube *copies* -- just to avoid changing our call args.
+                cube = cubes[i_cube]
+                demote_attrs = set(cube.attributes.globals) & invalid_globals
+                if any(demote_attrs):
+                    # Catch any demoted attrs where there is already a local version
+                    blocked_attrs = demote_attrs & set(cube.attributes.locals)
+                    if blocked_attrs:
+                        warnings.warn(
+                            f"Global cube attributes {sorted(blocked_attrs)} "
+                            f'of cube "{cube.name()}" were not saved, overlaid '
+                            "by existing local attributes with the same names.",
+                            category=iris.exceptions.IrisSaveWarning,
+                        )
+                    demote_attrs -= blocked_attrs
+                    if demote_attrs:
+                        # This cube contains some 'demoted' global attributes.
+                        # Replace input cube with a copy, so we can modify attributes.
+                        cube = cube.copy()
+                        cubes[i_cube] = cube
+                        for attr in demote_attrs:
+                            # move global to local
+                            value = cube.attributes.globals.pop(attr)
+                            cube.attributes.locals[attr] = value
+    else:
-        local_keys = set(local_keys)
-
-    # Determine the attribute keys that are common across all cubes and
-    # thereby extend the collection of local_keys for attributes
-    # that should be attributes on data variables.
-    attributes = cubes[0].attributes
-    common_keys = set(attributes)
-    for cube in cubes[1:]:
-        keys = set(cube.attributes)
-        local_keys.update(keys.symmetric_difference(common_keys))
-        common_keys.intersection_update(keys)
-    different_value_keys = []
-    for key in common_keys:
-        if np.any(attributes[key] != cube.attributes[key]):
-            different_value_keys.append(key)
-    common_keys.difference_update(different_value_keys)
-    local_keys.update(different_value_keys)
+        # Legacy mode: calculate "local_keys" to control which attributes are local
+        # and which global.
+ # TODO: when iris.FUTURE.save_split_attrs is removed, this section can also be + # removed + message = ( + "Saving to netcdf with legacy-style attribute handling for backwards " + "compatibility.\n" + "This mode is deprecated since Iris 3.8, and will eventually be removed.\n" + "Please consider enabling the new split-attributes handling mode, by " + "setting 'iris.FUTURE.save_split_attrs = True'." + ) + warn_deprecated(message) + + if local_keys is None: + local_keys = set() + else: + local_keys = set(local_keys) + + # Determine the attribute keys that are common across all cubes and + # thereby extend the collection of local_keys for attributes + # that should be attributes on data variables. + attributes = cubes[0].attributes + common_keys = set(attributes) + for cube in cubes[1:]: + keys = set(cube.attributes) + local_keys.update(keys.symmetric_difference(common_keys)) + common_keys.intersection_update(keys) + different_value_keys = [] + for key in common_keys: + if np.any(attributes[key] != cube.attributes[key]): + different_value_keys.append(key) + common_keys.difference_update(different_value_keys) + local_keys.update(different_value_keys) def is_valid_packspec(p): """Only checks that the datatype is valid.""" @@ -2967,7 +3098,12 @@ def is_valid_packspec(p): warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) # Add conventions attribute. - sman.update_global_attributes(Conventions=conventions) + if iris.FUTURE.save_split_attrs: + # In the "new way", we just create all the global attributes at once. + global_attributes["Conventions"] = conventions + sman.update_global_attributes(global_attributes) + else: + sman.update_global_attributes(Conventions=conventions) if compute: # No more to do, since we used Saver(compute=True). 
diff --git a/lib/iris/tests/integration/attrs_matrix_results_load.json b/lib/iris/tests/integration/attrs_matrix_results_load.json new file mode 100644 index 0000000000..a1d37708a9 --- /dev/null +++ b/lib/iris/tests/integration/attrs_matrix_results_load.json @@ -0,0 +1,1019 @@ +{ + "case_singlevar_localonly": { + "input": "G-La", + "localstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "G-La" + ] + }, + "globalstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "G-La" + ] + }, + "userstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "G-La" + ] + } + }, + "case_singlevar_globalonly": { + "input": "GaL-", + "localstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "GaL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "GaL-" + ] + }, + "userstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "GaL-" + ] + } + }, + "case_singlevar_glsame": { + "input": "GaLa", + "localstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "GaLa" + ] + }, + "globalstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "GaLa" + ] + }, + "userstyle": { + "legacy": [ + "G-La" + ], + "newstyle": [ + "GaLa" + ] + } + }, + "case_singlevar_gldiffer": { + "input": "GaLb", + "localstyle": { + "legacy": [ + "G-Lb" + ], + "newstyle": [ + "GaLb" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lb" + ], + "newstyle": [ + "GaLb" + ] + }, + "userstyle": { + "legacy": [ + "G-Lb" + ], + "newstyle": [ + "GaLb" + ] + } + }, + "case_multivar_same_noglobal": { + "input": "G-Laa", + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-Laa" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-Laa" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-Laa" + ] + } + }, + "case_multivar_same_sameglobal": { + "input": "GaLaa", + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLaa" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLaa" + ] + }, + "userstyle": 
{ + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLaa" + ] + } + }, + "case_multivar_same_diffglobal": { + "input": "GaLbb", + "localstyle": { + "legacy": [ + "G-Lbb" + ], + "newstyle": [ + "GaLbb" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lbb" + ], + "newstyle": [ + "GaLbb" + ] + }, + "userstyle": { + "legacy": [ + "G-Lbb" + ], + "newstyle": [ + "GaLbb" + ] + } + }, + "case_multivar_differ_noglobal": { + "input": "G-Lab", + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lab" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lab" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lab" + ] + } + }, + "case_multivar_differ_diffglobal": { + "input": "GaLbc", + "localstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLbc" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLbc" + ] + }, + "userstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLbc" + ] + } + }, + "case_multivar_differ_sameglobal": { + "input": "GaLab", + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLab" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLab" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLab" + ] + } + }, + "case_multivar_1none_noglobal": { + "input": "G-La-", + "localstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-La-" + ] + }, + "globalstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-La-" + ] + }, + "userstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-La-" + ] + } + }, + "case_multivar_1none_diffglobal": { + "input": "GaLb-", + "localstyle": { + "legacy": [ + "G-Lba" + ], + "newstyle": [ + "GaLb-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lba" + ], + "newstyle": [ + "GaLb-" + ] + }, + "userstyle": { + "legacy": [ + "G-Lba" + ], + "newstyle": [ + "GaLb-" + ] + } + }, + "case_multivar_1none_sameglobal": { + "input": "GaLa-", + "localstyle": { + 
"legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLa-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLa-" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLa-" + ] + } + }, + "case_multisource_gsame_lnone": { + "input": [ + "GaL-", + "GaL-" + ], + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaL--" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaL--" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaL--" + ] + } + }, + "case_multisource_gsame_lallsame": { + "input": [ + "GaLa", + "GaLa" + ], + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLaa" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLaa" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLaa" + ] + } + }, + "case_multisource_gsame_l1same1none": { + "input": [ + "GaLa", + "GaL-" + ], + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLa-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLa-" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "GaLa-" + ] + } + }, + "case_multisource_gsame_l1same1other": { + "input": [ + "GaLa", + "GaLb" + ], + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLab" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLab" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLab" + ] + } + }, + "case_multisource_gsame_lallother": { + "input": [ + "GaLb", + "GaLb" + ], + "localstyle": { + "legacy": [ + "G-Lbb" + ], + "newstyle": [ + "GaLbb" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lbb" + ], + "newstyle": [ + "GaLbb" + ] + }, + "userstyle": { + "legacy": [ + "G-Lbb" + ], + "newstyle": [ + "GaLbb" + ] + } + }, + "case_multisource_gsame_lalldiffer": { + "input": [ + "GaLb", + "GaLc" + ], + "localstyle": { + "legacy": [ + "G-Lbc" + ], + 
"newstyle": [ + "GaLbc" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLbc" + ] + }, + "userstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLbc" + ] + } + }, + "case_multisource_gnone_l1one1none": { + "input": [ + "G-La", + "G-L-" + ], + "localstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-La-" + ] + }, + "globalstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-La-" + ] + }, + "userstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-La-" + ] + } + }, + "case_multisource_gnone_l1one1same": { + "input": [ + "G-La", + "G-La" + ], + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-Laa" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-Laa" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-Laa" + ] + } + }, + "case_multisource_gnone_l1one1other": { + "input": [ + "G-La", + "G-Lb" + ], + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lab" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lab" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lab" + ] + } + }, + "case_multisource_g1none_lnone": { + "input": [ + "GaL-", + "G-L-" + ], + "localstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-L-", + "GaL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-L-", + "GaL-" + ] + }, + "userstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-L-", + "GaL-" + ] + } + }, + "case_multisource_g1none_l1same1none": { + "input": [ + "GaLa", + "G-L-" + ], + "localstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-L-", + "GaLa" + ] + }, + "globalstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-L-", + "GaLa" + ] + }, + "userstyle": { + "legacy": [ + "G-La-" + ], + "newstyle": [ + "G-L-", + "GaLa" + ] + } + }, + "case_multisource_g1none_l1none1same": { + "input": [ + "GaL-", + "G-La" + ], + "localstyle": { + 
"legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-La", + "GaL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-La", + "GaL-" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-La", + "GaL-" + ] + } + }, + "case_multisource_g1none_l1diff1none": { + "input": [ + "GaLb", + "G-L-" + ], + "localstyle": { + "legacy": [ + "G-Lb-" + ], + "newstyle": [ + "G-L-", + "GaLb" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lb-" + ], + "newstyle": [ + "G-L-", + "GaLb" + ] + }, + "userstyle": { + "legacy": [ + "G-Lb-" + ], + "newstyle": [ + "G-L-", + "GaLb" + ] + } + }, + "case_multisource_g1none_l1none1diff": { + "input": [ + "GaL-", + "G-Lb" + ], + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lb", + "GaL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lb", + "GaL-" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "G-Lb", + "GaL-" + ] + } + }, + "case_multisource_g1none_lallsame": { + "input": [ + "GaLa", + "G-La" + ], + "localstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-La", + "GaLa" + ] + }, + "globalstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-La", + "GaLa" + ] + }, + "userstyle": { + "legacy": [ + "G-Laa" + ], + "newstyle": [ + "G-La", + "GaLa" + ] + } + }, + "case_multisource_g1none_lallother": { + "input": [ + "GaLc", + "G-Lc" + ], + "localstyle": { + "legacy": [ + "G-Lcc" + ], + "newstyle": [ + "G-Lc", + "GaLc" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lcc" + ], + "newstyle": [ + "G-Lc", + "GaLc" + ] + }, + "userstyle": { + "legacy": [ + "G-Lcc" + ], + "newstyle": [ + "G-Lc", + "GaLc" + ] + } + }, + "case_multisource_gdiff_lnone": { + "input": [ + "GaL-", + "GbL-" + ], + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaL-", + "GbL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaL-", + "GbL-" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + 
"GaL-", + "GbL-" + ] + } + }, + "case_multisource_gdiff_l1same1none": { + "input": [ + "GaLa", + "GbL-" + ], + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLa", + "GbL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLa", + "GbL-" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLa", + "GbL-" + ] + } + }, + "case_multisource_gdiff_l1diff1none": { + "input": [ + "GaLb", + "GcL-" + ], + "localstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLb", + "GcL-" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLb", + "GcL-" + ] + }, + "userstyle": { + "legacy": [ + "G-Lbc" + ], + "newstyle": [ + "GaLb", + "GcL-" + ] + } + }, + "case_multisource_gdiff_lallsame": { + "input": [ + "GaLa", + "GbLb" + ], + "localstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLa", + "GbLb" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLa", + "GbLb" + ] + }, + "userstyle": { + "legacy": [ + "G-Lab" + ], + "newstyle": [ + "GaLa", + "GbLb" + ] + } + }, + "case_multisource_gdiff_lallother": { + "input": [ + "GaLc", + "GbLc" + ], + "localstyle": { + "legacy": [ + "G-Lcc" + ], + "newstyle": [ + "GaLc", + "GbLc" + ] + }, + "globalstyle": { + "legacy": [ + "G-Lcc" + ], + "newstyle": [ + "GaLc", + "GbLc" + ] + }, + "userstyle": { + "legacy": [ + "G-Lcc" + ], + "newstyle": [ + "GaLc", + "GbLc" + ] + } + } +} \ No newline at end of file diff --git a/lib/iris/tests/integration/attrs_matrix_results_roundtrip.json b/lib/iris/tests/integration/attrs_matrix_results_roundtrip.json new file mode 100644 index 0000000000..3446c7f312 --- /dev/null +++ b/lib/iris/tests/integration/attrs_matrix_results_roundtrip.json @@ -0,0 +1,983 @@ +{ + "case_singlevar_localonly": { + "input": "G-La", + "localstyle": { + "unsplit": [ + "G-La" + ], + "split": [ + "G-La" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "G-La" + ] + }, + "userstyle": { 
+ "unsplit": [ + "GaL-" + ], + "split": [ + "G-La" + ] + } + }, + "case_singlevar_globalonly": { + "input": "GaL-", + "localstyle": { + "unsplit": [ + "G-La" + ], + "split": [ + "GaL-" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaL-" + ] + }, + "userstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaL-" + ] + } + }, + "case_singlevar_glsame": { + "input": "GaLa", + "localstyle": { + "unsplit": [ + "G-La" + ], + "split": [ + "GaLa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaLa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaLa" + ] + } + }, + "case_singlevar_gldiffer": { + "input": "GaLb", + "localstyle": { + "unsplit": [ + "G-Lb" + ], + "split": [ + "GaLb" + ] + }, + "globalstyle": { + "unsplit": [ + "GbL-" + ], + "split": [ + "GaLb" + ] + }, + "userstyle": { + "unsplit": [ + "GbL-" + ], + "split": [ + "GaLb" + ] + } + }, + "case_multivar_same_noglobal": { + "input": "G-Laa", + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multivar_same_sameglobal": { + "input": "GaLaa", + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLaa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + } + }, + "case_multivar_same_diffglobal": { + "input": "GaLbb", + "localstyle": { + "unsplit": [ + "G-Lbb" + ], + "split": [ + "GaLbb" + ] + }, + "globalstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + }, + "userstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + } + }, + "case_multivar_differ_noglobal": { + "input": "G-Lab", + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ 
+ "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multivar_differ_diffglobal": { + "input": "GaLbc", + "localstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + } + }, + "case_multivar_differ_sameglobal": { + "input": "GaLab", + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + } + }, + "case_multivar_1none_noglobal": { + "input": "G-La-", + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multivar_1none_diffglobal": { + "input": "GaLb-", + "localstyle": { + "unsplit": [ + "G-Lba" + ], + "split": [ + "GaLb-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lba" + ], + "split": [ + "GaLb-" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lba" + ], + "split": [ + "GaLb-" + ] + } + }, + "case_multivar_1none_sameglobal": { + "input": "GaLa-", + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLa-" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + } + }, + "case_multisource_gsame_lnone": { + "input": [ + "GaL-", + "GaL-" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaL--" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaL--" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaL--" + ] + } + }, + "case_multisource_gsame_lallsame": { + 
"input": [ + "GaLa", + "GaLa" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLaa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + } + }, + "case_multisource_gsame_l1same1none": { + "input": [ + "GaLa", + "GaL-" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLa-" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + } + }, + "case_multisource_gsame_l1same1other": { + "input": [ + "GaLa", + "GaLb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + } + }, + "case_multisource_gsame_lallother": { + "input": [ + "GaLb", + "GaLb" + ], + "localstyle": { + "unsplit": [ + "G-Lbb" + ], + "split": [ + "GaLbb" + ] + }, + "globalstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + }, + "userstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + } + }, + "case_multisource_gsame_lalldiffer": { + "input": [ + "GaLb", + "GaLc" + ], + "localstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + } + }, + "case_multisource_gnone_l1one1none": { + "input": [ + "G-La", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multisource_gnone_l1one1same": { + "input": [ + "G-La", + "G-La" + ], + "localstyle": { + "unsplit": 
[ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multisource_gnone_l1one1other": { + "input": [ + "G-La", + "G-Lb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_g1none_lnone": { + "input": [ + "GaL-", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multisource_g1none_l1same1none": { + "input": [ + "GaLa", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multisource_g1none_l1none1same": { + "input": [ + "GaL-", + "G-La" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multisource_g1none_l1diff1none": { + "input": [ + "GaLb", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-Lb-" + ], + "split": [ + "G-Lb-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lb-" + ], + "split": [ + "G-Lb-" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lb-" + ], + "split": [ + "G-Lb-" + ] + } + }, + "case_multisource_g1none_l1none1diff": { + "input": [ + "GaL-", + "G-Lb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + 
"globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_g1none_lallsame": { + "input": [ + "GaLa", + "G-La" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multisource_g1none_lallother": { + "input": [ + "GaLc", + "G-Lc" + ], + "localstyle": { + "unsplit": [ + "G-Lcc" + ], + "split": [ + "G-Lcc" + ] + }, + "globalstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + }, + "userstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + } + }, + "case_multisource_gdiff_lnone": { + "input": [ + "GaL-", + "GbL-" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_gdiff_l1same1none": { + "input": [ + "GaLa", + "GbL-" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_gdiff_l1diff1none": { + "input": [ + "GaLb", + "GcL-" + ], + "localstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "G-Lbc" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "G-Lbc" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "G-Lbc" + ] + } + }, + "case_multisource_gdiff_lallsame": { + "input": [ + "GaLa", + "GbLb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] 
+ }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_gdiff_lallother": { + "input": [ + "GaLc", + "GbLc" + ], + "localstyle": { + "unsplit": [ + "G-Lcc" + ], + "split": [ + "G-Lcc" + ] + }, + "globalstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + }, + "userstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + } + } +} \ No newline at end of file diff --git a/lib/iris/tests/integration/attrs_matrix_results_save.json b/lib/iris/tests/integration/attrs_matrix_results_save.json new file mode 100644 index 0000000000..3446c7f312 --- /dev/null +++ b/lib/iris/tests/integration/attrs_matrix_results_save.json @@ -0,0 +1,983 @@ +{ + "case_singlevar_localonly": { + "input": "G-La", + "localstyle": { + "unsplit": [ + "G-La" + ], + "split": [ + "G-La" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "G-La" + ] + }, + "userstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "G-La" + ] + } + }, + "case_singlevar_globalonly": { + "input": "GaL-", + "localstyle": { + "unsplit": [ + "G-La" + ], + "split": [ + "GaL-" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaL-" + ] + }, + "userstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaL-" + ] + } + }, + "case_singlevar_glsame": { + "input": "GaLa", + "localstyle": { + "unsplit": [ + "G-La" + ], + "split": [ + "GaLa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaLa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL-" + ], + "split": [ + "GaLa" + ] + } + }, + "case_singlevar_gldiffer": { + "input": "GaLb", + "localstyle": { + "unsplit": [ + "G-Lb" + ], + "split": [ + "GaLb" + ] + }, + "globalstyle": { + "unsplit": [ + "GbL-" + ], + "split": [ + "GaLb" + ] + }, + "userstyle": { + "unsplit": [ + "GbL-" + ], + "split": [ + "GaLb" + ] + } + }, + "case_multivar_same_noglobal": { + "input": "G-Laa", + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + 
] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multivar_same_sameglobal": { + "input": "GaLaa", + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLaa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + } + }, + "case_multivar_same_diffglobal": { + "input": "GaLbb", + "localstyle": { + "unsplit": [ + "G-Lbb" + ], + "split": [ + "GaLbb" + ] + }, + "globalstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + }, + "userstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + } + }, + "case_multivar_differ_noglobal": { + "input": "G-Lab", + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multivar_differ_diffglobal": { + "input": "GaLbc", + "localstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + } + }, + "case_multivar_differ_sameglobal": { + "input": "GaLab", + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + } + }, + "case_multivar_1none_noglobal": { + "input": "G-La-", + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + 
"case_multivar_1none_diffglobal": { + "input": "GaLb-", + "localstyle": { + "unsplit": [ + "G-Lba" + ], + "split": [ + "GaLb-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lba" + ], + "split": [ + "GaLb-" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lba" + ], + "split": [ + "GaLb-" + ] + } + }, + "case_multivar_1none_sameglobal": { + "input": "GaLa-", + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLa-" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + } + }, + "case_multisource_gsame_lnone": { + "input": [ + "GaL-", + "GaL-" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaL--" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaL--" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaL--" + ] + } + }, + "case_multisource_gsame_lallsame": { + "input": [ + "GaLa", + "GaLa" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLaa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLaa" + ] + } + }, + "case_multisource_gsame_l1same1none": { + "input": [ + "GaLa", + "GaL-" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "GaLa-" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "GaLa-" + ] + } + }, + "case_multisource_gsame_l1same1other": { + "input": [ + "GaLa", + "GaLb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "GaLab" + ] + } + }, + "case_multisource_gsame_lallother": { + "input": [ + "GaLb", + "GaLb" + ], + "localstyle": { + "unsplit": [ + 
"G-Lbb" + ], + "split": [ + "GaLbb" + ] + }, + "globalstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + }, + "userstyle": { + "unsplit": [ + "GbL--" + ], + "split": [ + "GaLbb" + ] + } + }, + "case_multisource_gsame_lalldiffer": { + "input": [ + "GaLb", + "GaLc" + ], + "localstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "GaLbc" + ] + } + }, + "case_multisource_gnone_l1one1none": { + "input": [ + "G-La", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multisource_gnone_l1one1same": { + "input": [ + "G-La", + "G-La" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multisource_gnone_l1one1other": { + "input": [ + "G-La", + "G-Lb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_g1none_lnone": { + "input": [ + "GaL-", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multisource_g1none_l1same1none": { + "input": [ + "GaLa", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "globalstyle": { + 
"unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + }, + "userstyle": { + "unsplit": [ + "G-La-" + ], + "split": [ + "G-La-" + ] + } + }, + "case_multisource_g1none_l1none1same": { + "input": [ + "GaL-", + "G-La" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multisource_g1none_l1diff1none": { + "input": [ + "GaLb", + "G-L-" + ], + "localstyle": { + "unsplit": [ + "G-Lb-" + ], + "split": [ + "G-Lb-" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lb-" + ], + "split": [ + "G-Lb-" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lb-" + ], + "split": [ + "G-Lb-" + ] + } + }, + "case_multisource_g1none_l1none1diff": { + "input": [ + "GaL-", + "G-Lb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_g1none_lallsame": { + "input": [ + "GaLa", + "G-La" + ], + "localstyle": { + "unsplit": [ + "G-Laa" + ], + "split": [ + "G-Laa" + ] + }, + "globalstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + }, + "userstyle": { + "unsplit": [ + "GaL--" + ], + "split": [ + "G-Laa" + ] + } + }, + "case_multisource_g1none_lallother": { + "input": [ + "GaLc", + "G-Lc" + ], + "localstyle": { + "unsplit": [ + "G-Lcc" + ], + "split": [ + "G-Lcc" + ] + }, + "globalstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + }, + "userstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + } + }, + "case_multisource_gdiff_lnone": { + "input": [ + "GaL-", + "GbL-" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + 
"userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_gdiff_l1same1none": { + "input": [ + "GaLa", + "GbL-" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_gdiff_l1diff1none": { + "input": [ + "GaLb", + "GcL-" + ], + "localstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "G-Lbc" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "G-Lbc" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lbc" + ], + "split": [ + "G-Lbc" + ] + } + }, + "case_multisource_gdiff_lallsame": { + "input": [ + "GaLa", + "GbLb" + ], + "localstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "globalstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + }, + "userstyle": { + "unsplit": [ + "G-Lab" + ], + "split": [ + "G-Lab" + ] + } + }, + "case_multisource_gdiff_lallother": { + "input": [ + "GaLc", + "GbLc" + ], + "localstyle": { + "unsplit": [ + "G-Lcc" + ], + "split": [ + "G-Lcc" + ] + }, + "globalstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + }, + "userstyle": { + "unsplit": [ + "GcL--" + ], + "split": [ + "G-Lcc" + ] + } + } +} \ No newline at end of file diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index d3f2ce22c4..c8c218000c 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -23,6 +23,13 @@ class Test__lazy_stream_data: + # Ensure all saves are done with split-atttribute saving, + # -- because some of these tests are sensitive to unexpected warnings. 
+ @pytest.fixture(autouse=True) + def all_saves_with_split_attrs(self): + with iris.FUTURE.context(save_split_attrs=True): + yield + @pytest.fixture(autouse=True) def output_path(self, tmp_path): # A temporary output netcdf-file path, **unique to each test call**. diff --git a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py new file mode 100644 index 0000000000..b09b408827 --- /dev/null +++ b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py @@ -0,0 +1,1678 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +""" +Integration tests for loading and saving netcdf file attributes. + +Notes: +(1) attributes in netCDF files can be either "global attributes", or variable +("local") type. + +(2) in CF terms, this testcode classifies specific attributes (names) as either +"global" = names recognised by convention as normally stored in a file-global +setting; "local" = recognised names specifying details of variable data +encoding, which only make sense as a "local" attribute (i.e. on a variable), +and "user" = any additional attributes *not* recognised in conventions, which +might be recorded either globally or locally. + +""" +import inspect +import json +import os +from pathlib import Path +import re +from typing import Iterable, List, Optional, Union +import warnings + +import numpy as np +import pytest + +import iris +import iris.coord_systems +from iris.coords import DimCoord +from iris.cube import Cube +import iris.fileformats.netcdf +import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc4 + +# First define the known controlled attribute names defined by netCDf and CF conventions +# +# Note: certain attributes are "normally" global (e.g. "Conventions"), whilst others +# will only usually appear on a data-variable (e.g. "scale_factor"", "coordinates"). 
+# I'm calling these 'global-style' and 'local-style'. +# Any attributes either belongs to one of these 2 groups, or neither. Those 3 distinct +# types may then have different behaviour in Iris load + save. + +# A list of "global-style" attribute names : those which should be global attributes by +# default (i.e. file- or group-level, *not* attached to a variable). + +_GLOBAL_TEST_ATTRS = set(iris.fileformats.netcdf.saver._CF_GLOBAL_ATTRS) +# Remove this one, which has peculiar behaviour + is tested separately +# N.B. this is not the same as 'Conventions', but is caught in the crossfire when that +# one is processed. +_GLOBAL_TEST_ATTRS -= set(["conventions"]) +_GLOBAL_TEST_ATTRS = sorted(_GLOBAL_TEST_ATTRS) + + +# Define a fixture to parametrise tests over the 'global-style' test attributes. +# This just provides a more concise way of writing parametrised tests. +@pytest.fixture(params=_GLOBAL_TEST_ATTRS) +def global_attr(request): + # N.B. "request" is a standard PyTest fixture + return request.param # Return the name of the attribute to test. + + +# A list of "local-style" attribute names : those which should be variable attributes +# by default (aka "local", "variable" or "data" attributes) . +_LOCAL_TEST_ATTRS = ( + iris.fileformats.netcdf.saver._CF_DATA_ATTRS + + iris.fileformats.netcdf.saver._UKMO_DATA_ATTRS +) + + +# Define a fixture to parametrise over the 'local-style' test attributes. +# This just provides a more concise way of writing parametrised tests. +@pytest.fixture(params=_LOCAL_TEST_ATTRS) +def local_attr(request): + # N.B. "request" is a standard PyTest fixture + return request.param # Return the name of the attribute to test. + + +# Define whether to parametrise over split-attribute saving +# Just for now, so that we can run against legacy code. 
+_SPLIT_SAVE_SUPPORTED = hasattr(iris.FUTURE, "save_split_attrs") +_SPLIT_PARAM_VALUES = [False, True] +_SPLIT_PARAM_IDS = ["nosplit", "split"] +_MATRIX_LOAD_RESULTSTYLES = ["legacy", "newstyle"] +if not _SPLIT_SAVE_SUPPORTED: + _SPLIT_PARAM_VALUES.remove(True) + _SPLIT_PARAM_IDS.remove("split") + _MATRIX_LOAD_RESULTSTYLES.remove("newstyle") + + +_SKIP_WARNCHECK = "_no_warnings_check" + + +def check_captured_warnings( + expected_keys: List[str], + captured_warnings: List[warnings.WarningMessage], + allow_possible_legacy_warning: bool = False, +): + """ + Compare captured warning messages with a list of regexp-matches. + + We allow them to occur in any order, and replace each actual result in the list + with its matching regexp, if any, as this makes failure results much easier to + comprehend. + + """ + # TODO: when iris.FUTURE.save_split_attrs is removed, we can remove the + # 'allow_possible_legacy_warning' arg. + + if expected_keys is None: + expected_keys = [] + elif hasattr(expected_keys, "upper"): + # Handle a single string + if expected_keys == _SKIP_WARNCHECK: + # No check at all in this case + return + expected_keys = [expected_keys] + + if allow_possible_legacy_warning: + # Allow but do not require a "saving without split-attributes" warning. + legacy_message_key = ( + "Saving to netcdf with legacy-style attribute handling for backwards " + "compatibility." 
+ ) + expected_keys.append(legacy_message_key) + + expected_keys = [re.compile(key) for key in expected_keys] + found_results = [str(warning.message) for warning in captured_warnings] + remaining_keys = expected_keys.copy() + for i_message, message in enumerate(found_results.copy()): + for key in remaining_keys: + if key.search(message): + # Hit : replace one message in the list with its matching "key" + found_results[i_message] = key + # remove the matching key + remaining_keys.remove(key) + # skip on to next message + break + + if allow_possible_legacy_warning: + # Remove any unused "legacy attribute saving" key. + # N.B. this is the *only* key we will tolerate not being used. + expected_keys = [ + key for key in expected_keys if key != legacy_message_key + ] + + assert set(found_results) == set(expected_keys) + + +class MixinAttrsTesting: + @staticmethod + def _calling_testname(): + """ + Search up the callstack for a function named "test_*", and return the name for + use as a test identifier. + + Idea borrowed from :meth:`iris.tests.IrisTest.result_path`. + + Returns + ------- + test_name : str + Returns a string, with the initial "test_" removed. + """ + test_name = None + stack = inspect.stack() + for frame in stack[1:]: + full_name = frame[3] + if full_name.startswith("test_"): + # Return the name with the initial "test_" removed. + test_name = full_name.replace("test_", "") + break + # Search should not fail, unless we were called from an inappropriate place? + assert test_name is not None + return test_name + + @pytest.fixture(autouse=True) + def make_tempdir(self, tmp_path_factory): + """ + Automatically-run fixture to activate the 'tmp_path_factory' fixture on *every* + test: Make a directory for temporary files, and record it on the test instance. + + N.B. "tmp_path_factory" is a standard PyTest fixture, which provides a dirpath + *shared* by all tests. This is a bit quicker and more debuggable than having a + directory per-testcase. 
+ """ + # Store the temporary directory path on the test instance + self.tmpdir = str(tmp_path_factory.getbasetemp()) + + def _testfile_path(self, basename: str) -> str: + # Make a filepath in the temporary directory, based on the name of the calling + # test method, and the "self.attrname" it sets up. + testname = self._calling_testname() + # Turn that into a suitable temporary filename + ext_name = getattr(self, "testname_extension", "") + if ext_name: + basename = basename + "_" + ext_name + path_str = f"{self.tmpdir}/{self.__class__.__name__}__test_{testname}-{self.attrname}__{basename}.nc" + return path_str + + @staticmethod + def _default_vars_and_attrvalues(vars_and_attrvalues): + # Simple default strategy : turn a simple value into {'var': value} + if not isinstance(vars_and_attrvalues, dict): + # Treat single non-dict argument as a value for a single variable + vars_and_attrvalues = {"var": vars_and_attrvalues} + return vars_and_attrvalues + + def create_testcase_files_or_cubes( + self, + attr_name: str, + global_value_file1: Optional[str] = None, + var_values_file1: Union[None, str, dict] = None, + global_value_file2: Optional[str] = None, + var_values_file2: Union[None, str, dict] = None, + cubes: bool = False, + ): + """ + Create temporary input netcdf files, or cubes, with specific content. + + Creates a temporary netcdf test file (or two) with the given global and + variable-local attributes. Or build cubes, similarly. + If ``cubes`` is ``True``, save cubes in ``self.input_cubes``. + Else save filepaths in ``self.input_filepaths``. + + Note: 'var_values_file' args are dictionaries. The named variables are + created, with an attribute = the dictionary value, *except* that a dictionary + value of None means that a local attribute is _not_ created on the variable. + """ + # save attribute on the instance + self.attrname = attr_name + + if not cubes: + # Make some input file paths. 
+ filepath1 = self._testfile_path("testfile") + filepath2 = self._testfile_path("testfile2") + + def make_file( + filepath: str, global_value=None, var_values=None + ) -> str: + ds = threadsafe_nc4.DatasetWrapper(filepath, "w") + if global_value is not None: + ds.setncattr(attr_name, global_value) + ds.createDimension("x", 3) + # Rationalise the per-variable requirements + # N.B. this *always* makes at least one variable, as otherwise we would + # load no cubes. + var_values = self._default_vars_and_attrvalues(var_values) + for var_name, value in var_values.items(): + v = ds.createVariable(var_name, int, ("x",)) + if value is not None: + v.setncattr(attr_name, value) + ds.close() + return filepath + + def make_cubes(var_name, global_value=None, var_values=None): + cubes = [] + var_values = self._default_vars_and_attrvalues(var_values) + for varname, local_value in var_values.items(): + cube = Cube(np.arange(3.0), var_name=var_name) + cubes.append(cube) + dimco = DimCoord(np.arange(3.0), var_name="x") + cube.add_dim_coord(dimco, 0) + if not hasattr(cube.attributes, "globals"): + # N.B. For now, also support oldstyle "single" cube attribute + # dictionaries, so that we can generate legacy results to compore + # with the "new world" results. 
+ single_value = global_value + if local_value is not None: + single_value = local_value + if single_value is not None: + cube.attributes[attr_name] = single_value + else: + if global_value is not None: + cube.attributes.globals[attr_name] = global_value + if local_value is not None: + cube.attributes.locals[attr_name] = local_value + return cubes + + if cubes: + results = make_cubes("v1", global_value_file1, var_values_file1) + if global_value_file2 is not None or var_values_file2 is not None: + results.extend( + make_cubes("v2", global_value_file2, var_values_file2) + ) + else: + results = [ + make_file(filepath1, global_value_file1, var_values_file1) + ] + if global_value_file2 is not None or var_values_file2 is not None: + # Make a second testfile and add it to files-to-be-loaded. + results.append( + make_file(filepath2, global_value_file2, var_values_file2) + ) + + # Save results on the instance + if cubes: + self.input_cubes = results + else: + self.input_filepaths = results + return results + + def run_testcase( + self, + attr_name: str, + values: Union[List, List[List]], + create_cubes_or_files: str = "files", + ) -> None: + """ + Create testcase inputs (files or cubes) with specified attributes. + + Parameters + ---------- + attr_name : str + name for all attributes created in this testcase. + Also saved as ``self.attrname``, as used by ``fetch_results``. + values : list + list, or lists, of values for created attributes, each containing one global + and one-or-more local attribute values as [global, local1, local2...] + create_cubes_or_files : str, default "files" + create either cubes or testfiles. + + If ``create_cubes_or_files`` == "files", create one temporary netCDF file per + values-list, and record in ``self.input_filepaths``. + Else if ``create_cubes_or_files`` == "cubes", create sets of cubes with common + global values and store all of them to ``self.input_cubes``. 
+ + """ + # Save common attribute-name on the instance + self.attrname = attr_name + + # Standardise input to a list-of-lists, each inner list = [global, *locals] + assert isinstance(values, list) + if not isinstance(values[0], list): + values = [values] + assert len(values) in (1, 2) + assert len(values[0]) > 1 + + # Decode into global1, *locals1, and optionally global2, *locals2 + global1 = values[0][0] + vars1 = {} + i_var = 0 + for value in values[0][1:]: + vars1[f"var_{i_var}"] = value + i_var += 1 + if len(values) == 1: + global2 = None + vars2 = None + else: + assert len(values) == 2 + global2 = values[1][0] + vars2 = {} + for value in values[1][1:]: + vars2[f"var_{i_var}"] = value + i_var += 1 + + # Create test files or cubes (and store data on the instance) + assert create_cubes_or_files in ("cubes", "files") + make_cubes = create_cubes_or_files == "cubes" + self.create_testcase_files_or_cubes( + attr_name=attr_name, + global_value_file1=global1, + var_values_file1=vars1, + global_value_file2=global2, + var_values_file2=vars2, + cubes=make_cubes, + ) + + def fetch_results( + self, + filepath: str = None, + cubes: Iterable[Cube] = None, + oldstyle_combined: bool = False, + ): + """ + Return testcase results from an output file or cubes in a standardised form. + + Unpick the global+local values of the attribute ``self.attrname``, resulting + from a test operation. + A file result is always [global_value, *local_values] + A cubes result is [*[global_value, *local_values]] (over different global vals) + + When ``oldstyle_combined`` is ``True``, simulate the "legacy" style results, + that is when each cube had a single combined attribute dictionary. + This enables us to check against former behaviour, by combining results into a + single dictionary. N.B. per-cube single results are then returned in the form: + [None, cube1, cube2...]. + N.B. if results are from a *file*, this key has **no effect**. 
+ + """ + attr_name = self.attrname + if filepath is not None: + # Fetch global and local values from a file + try: + ds = threadsafe_nc4.DatasetWrapper(filepath) + global_result = ( + ds.getncattr(attr_name) + if attr_name in ds.ncattrs() + else None + ) + # Fetch local attr value from all data variables : In our testcases, + # that is all *except* dimcoords (ones named after dimensions). + local_vars_results = [ + ( + var.name, + ( + var.getncattr(attr_name) + if attr_name in var.ncattrs() + else None + ), + ) + for var in ds.variables.values() + if var.name not in ds.dimensions + ] + finally: + ds.close() + # This version always returns a single result set [global, local1[, local2]] + # Return global, plus locals sorted by varname + local_vars_results = sorted(local_vars_results, key=lambda x: x[0]) + results = [global_result] + [val for _, val in local_vars_results] + else: + assert cubes is not None + # Sort result cubes according to a standard ordering. + cubes = sorted(cubes, key=lambda cube: cube.name()) + # Fetch globals and locals from cubes. + # This way returns *multiple* result 'sets', one for each global value + if oldstyle_combined or not _SPLIT_SAVE_SUPPORTED: + # Use all-combined dictionaries in place of actual cubes' attributes + cube_attr_dicts = [dict(cube.attributes) for cube in cubes] + # Return results as if all cubes had global=None + results = [ + [None] + + [ + cube_attr_dict.get(attr_name, None) + for cube_attr_dict in cube_attr_dicts + ] + ] + else: + # Return a result-set for each occurring global value (possibly + # including a 'None'). 
+ global_values = set( + cube.attributes.globals.get(attr_name, None) + for cube in cubes + ) + results = [ + [globalval] + + [ + cube.attributes.locals.get(attr_name, None) + for cube in cubes + if cube.attributes.globals.get(attr_name, None) + == globalval + ] + for globalval in sorted(global_values, key=str) + ] + return results + + +# Define all the testcases for different parameter input structures : +# - combinations of matching+differing, global+local params +# - these are interpreted differently for the 3 main test types : Load/Save/Roundtrip +_MATRIX_TESTCASE_INPUTS = { + "case_singlevar_localonly": "G-La", + "case_singlevar_globalonly": "GaL-", + "case_singlevar_glsame": "GaLa", + "case_singlevar_gldiffer": "GaLb", + "case_multivar_same_noglobal": "G-Laa", + "case_multivar_same_sameglobal": "GaLaa", + "case_multivar_same_diffglobal": "GaLbb", + "case_multivar_differ_noglobal": "G-Lab", + "case_multivar_differ_diffglobal": "GaLbc", + "case_multivar_differ_sameglobal": "GaLab", + "case_multivar_1none_noglobal": "G-La-", + "case_multivar_1none_diffglobal": "GaLb-", + "case_multivar_1none_sameglobal": "GaLa-", + # Note: the multi-set input cases are more complex. + # These are encoded as *pairs* of specs, for 2 different files, or cubes with + # independent global values. + # We assume that there can be nothing "special" about a var's interaction with + # another one from the same (as opposed to the "other") file. 
+ "case_multisource_gsame_lnone": ["GaL-", "GaL-"], + "case_multisource_gsame_lallsame": ["GaLa", "GaLa"], + "case_multisource_gsame_l1same1none": ["GaLa", "GaL-"], + "case_multisource_gsame_l1same1other": ["GaLa", "GaLb"], + "case_multisource_gsame_lallother": ["GaLb", "GaLb"], + "case_multisource_gsame_lalldiffer": ["GaLb", "GaLc"], + "case_multisource_gnone_l1one1none": ["G-La", "G-L-"], + "case_multisource_gnone_l1one1same": ["G-La", "G-La"], + "case_multisource_gnone_l1one1other": ["G-La", "G-Lb"], + "case_multisource_g1none_lnone": ["GaL-", "G-L-"], + "case_multisource_g1none_l1same1none": ["GaLa", "G-L-"], + "case_multisource_g1none_l1none1same": ["GaL-", "G-La"], + "case_multisource_g1none_l1diff1none": ["GaLb", "G-L-"], + "case_multisource_g1none_l1none1diff": ["GaL-", "G-Lb"], + "case_multisource_g1none_lallsame": ["GaLa", "G-La"], + "case_multisource_g1none_lallother": ["GaLc", "G-Lc"], + "case_multisource_gdiff_lnone": ["GaL-", "GbL-"], + "case_multisource_gdiff_l1same1none": ["GaLa", "GbL-"], + "case_multisource_gdiff_l1diff1none": ["GaLb", "GcL-"], + "case_multisource_gdiff_lallsame": ["GaLa", "GbLb"], + "case_multisource_gdiff_lallother": ["GaLc", "GbLc"], +} +_MATRIX_TESTCASES = list(_MATRIX_TESTCASE_INPUTS.keys()) + +# +# Define the attrs against which all matrix tests are run +# +max_param_attrs = None +# max_param_attrs = 5 + +_MATRIX_ATTRNAMES = _LOCAL_TEST_ATTRS[:max_param_attrs] +_MATRIX_ATTRNAMES += _GLOBAL_TEST_ATTRS[:max_param_attrs] +_MATRIX_ATTRNAMES += ["user"] + +# remove special-cases, for now : all these behave irregularly (i.e. unlike the known +# "globalstyle", or "localstyle" generic cases). +# N.B. not including "Conventions", which is not in the globals list, so won't be +# matrix-tested unless we add it specifically. +# TODO: decide if any of these need to be tested, as separate test-styles. 
_SPECIAL_ATTRS = [
    "ukmo__process_flags",
    "missing_value",
    "standard_error_multiplier",
    "STASH",
    "um_stash_source",
]
_MATRIX_ATTRNAMES = [
    attr for attr in _MATRIX_ATTRNAMES if attr not in _SPECIAL_ATTRS
]


#
# A routine to work "backwards" from an attribute name to its "style", i.e. type category.
# Possible styles are "globalstyle", "localstyle", "userstyle".
#
_ATTR_STYLES = ["localstyle", "globalstyle", "userstyle"]


def deduce_attr_style(attrname: str) -> str:
    """Return the "style type" category of an attr-param name."""
    if attrname in _LOCAL_TEST_ATTRS:
        style = "localstyle"
    elif attrname in _GLOBAL_TEST_ATTRS:
        style = "globalstyle"
    else:
        # Anything not in the local/global lists must be the generic "user" case.
        assert attrname == "user"
        style = "userstyle"
    return style


#
# Decode a matrix "input spec" to codes for global + local values.
#
def decode_matrix_input(input_spec):
    # Decode a matrix-test input specification, like "GaLbc", into lists of values.
    # E.G. "GaLbc" -> ["a", "b", "c"]
    #   ["GaLbc", "GbLbc"] -> [["a", "b", "c"], ["b", "b", "c"]]
    # N.B. in this form "values" are all one-character strings.
    def decode_specstring(spec: str) -> List[Union[str, None]]:
        # Decode an input spec-string to input/output attribute values
        assert spec[0] == "G" and spec[2] == "L"
        # One char for the global value, then one per local value.
        allvals = spec[1] + spec[3:]
        result = [None if valchar == "-" else valchar for valchar in allvals]
        return result

    if isinstance(input_spec, str):
        # Single-source spec (one cube or one file)
        vals = decode_specstring(input_spec)
        result = [vals]
    else:
        # Dual-source spec (two files, or sets of cubes with a common global value)
        vals_A = decode_specstring(input_spec[0])
        vals_B = decode_specstring(input_spec[1])
        result = [vals_A, vals_B]

    return result


def encode_matrix_result(results: List[List[str]]) -> List[str]:
    # Re-code a set of output results, [*[global-value, *local-values]] as a list of
    # strings, like ["GaL-b"] or ["GaLabc", "GbLabc"].
    # N.B. again assuming that all values are just one-character strings, or None.
    assert isinstance(results, Iterable) and len(results) >= 1
    if not isinstance(results[0], list):
        # Standardise a single result-set to a list-of-one.
        results = [results]
    assert all(
        all(val is None or isinstance(val, str) for val in vals)
        for vals in results
    )

    # Translate "None" values to "-"
    def valrep(val):
        return "-" if val is None else val

    results = list(
        "".join(["G", valrep(vals[0]), "L"] + list(map(valrep, vals[1:])))
        for vals in results
    )
    return results


#
# The "expected" matrix test results are stored in JSON files (one for each test-type).
# We can also save the found results.
#
_MATRIX_TESTTYPES = ("load", "save", "roundtrip")


@pytest.fixture(autouse=True, scope="session")
def matrix_results():
    matrix_filepaths = {
        testtype: (
            Path(__file__).parent / f"attrs_matrix_results_{testtype}.json"
        )
        for testtype in _MATRIX_TESTTYPES
    }
    # An environment variable can trigger saving of the results.
    save_matrix_results = bool(
        int(os.environ.get("SAVEALL_MATRIX_RESULTS", "0"))
    )

    matrix_results = {}
    for testtype in _MATRIX_TESTTYPES:
        # Either fetch from file, or initialise, a results matrix for each test type
        # (load/save/roundtrip).
        input_path = matrix_filepaths[testtype]
        if input_path.exists():
            # Load from file with json.
            with open(input_path) as file_in:
                testtype_results = json.load(file_in)
            # Check compatibility (in case we changed the test-specs list)
            assert set(testtype_results.keys()) == set(_MATRIX_TESTCASES)
            assert all(
                testtype_results[key]["input"] == _MATRIX_TESTCASE_INPUTS[key]
                for key in _MATRIX_TESTCASES
            )
        else:
            # Create empty matrix results content (for one test-type)
            testtype_results = {}
            for testcase in _MATRIX_TESTCASES:
                test_case_results = {}
                testtype_results[testcase] = test_case_results
                # Every testcase dict has an "input" slot with the test input spec,
                # basically just to help human readability.
+ test_case_results["input"] = _MATRIX_TESTCASE_INPUTS[testcase] + for attrstyle in _ATTR_STYLES: + if testtype == "load": + # "load" test results have a "legacy" result (as for a single + # combined attrs dictionary), and a "newstyle" result (with + # the new split dictionary). + test_case_results[attrstyle] = { + "legacy": None, + "newstyle": None, + } + else: + # "save"/"roundtrip"-type results record 2 result sets, + # (unsplit/split) for each attribute-style + # - i.e. when saved without/with split_attrs_saving enabled. + test_case_results[attrstyle] = { + "unsplit": None, + "split": None, + } + + # Build complete data: matrix_results[TESTTYPES][TESTCASES][ATTR_STYLES] + matrix_results[testtype] = testtype_results + + # Pass through to all the tests : they can also update it, if enabled. + yield save_matrix_results, matrix_results + + if save_matrix_results: + for testtype in _MATRIX_TESTTYPES: + output_path = matrix_filepaths[testtype] + results = matrix_results[testtype] + with open(output_path, "w") as file_out: + json.dump(results, file_out, indent=2) + + +class TestRoundtrip(MixinAttrsTesting): + """ + Test handling of attributes in roundtrip netcdf-iris-netcdf. + + This behaviour should be (almost) unchanged by the adoption of + split-attribute handling. + + NOTE: the tested combinations in the 'TestLoad' test all match tests here, but not + *all* of the tests here are useful there. To avoid confusion (!) the ones which are + paralleled in TestLoad there have the identical test-names. However, as the tests + are all numbered that means there are missing numbers there. + The tests are numbered only so it is easier to review the discovered test list + (which is sorted). + + """ + + # Parametrise all tests over split/unsplit saving. 
+ @pytest.fixture( + params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True + ) + def do_split(self, request): + do_split = request.param + self.save_split_attrs = do_split + return do_split + + def run_roundtrip_testcase(self, attr_name, values): + """ + Initialise the testcase from the passed-in controls, configure the input + files and run a save-load roundtrip to produce the output file. + + The name of the attribute, and the input and output temporary filepaths are + stored on the instance, where "self.check_roundtrip_results()" can get them. + + """ + self.run_testcase( + attr_name=attr_name, values=values, create_cubes_or_files="files" + ) + self.result_filepath = self._testfile_path("result") + + with warnings.catch_warnings(record=True) as captured_warnings: + # Do a load+save to produce a testable output result in a new file. + cubes = iris.load(self.input_filepaths) + # Ensure stable result order. + cubes = sorted(cubes, key=lambda cube: cube.name()) + do_split = getattr(self, "save_split_attrs", False) + kwargs = ( + dict(save_split_attrs=do_split) + if _SPLIT_SAVE_SUPPORTED + else dict() + ) + with iris.FUTURE.context(**kwargs): + iris.save(cubes, self.result_filepath) + + self.captured_warnings = captured_warnings + + def check_roundtrip_results(self, expected, expected_warnings=None): + """ + Run checks on the generated output file. + + The counterpart to :meth:`run_roundtrip_testcase`, with similar arguments. + Check existence (or not) of a global attribute, and a number of local + (variable) attributes. + Values of 'None' mean to check that the relevant global/local attribute does + *not* exist. + + Also check the warnings captured during the testcase run. + """ + # N.B. there is only ever one result-file, but it can contain various variables + # which came from different input files. 
+ results = self.fetch_results(filepath=self.result_filepath) + assert results == expected + check_captured_warnings( + expected_warnings, + self.captured_warnings, + # N.B. only allow a legacy-attributes warning when NOT saving split-attrs + allow_possible_legacy_warning=not self.save_split_attrs, + ) + + ####################################################### + # Tests on "user-style" attributes. + # This means any arbitrary attribute which a user might have added -- i.e. one with + # a name which is *not* recognised in the netCDF or CF conventions. + # + + def test_01_userstyle_single_global(self): + self.run_roundtrip_testcase( + attr_name="myname", values=["single-value", None] + ) + # Default behaviour for a general global user-attribute. + # It simply remains global. + self.check_roundtrip_results(["single-value", None]) + + def test_02_userstyle_single_local(self, do_split): + # Default behaviour for a general local user-attribute. + # It results in a "promoted" global attribute. + self.run_roundtrip_testcase( + attr_name="myname", # A generic "user" attribute with no special handling + values=[None, "single-value"], + ) + if do_split: + expected = [None, "single-value"] + else: + expected = ["single-value", None] + self.check_roundtrip_results(expected) + + def test_03_userstyle_multiple_different(self, do_split): + # Default behaviour for general user-attributes. + # The global attribute is lost because there are local ones. 
+ self.run_roundtrip_testcase( + attr_name="random", # A generic "user" attribute with no special handling + values=[ + ["common_global", "f1v1", "f1v2"], + ["common_global", "x1", "x2"], + ], + ) + expected_result = ["common_global", "f1v1", "f1v2", "x1", "x2"] + if not do_split: + # in legacy mode, global is lost + expected_result[0] = None + # just check they are all there and distinct + self.check_roundtrip_results(expected_result) + + def test_04_userstyle_matching_promoted(self, do_split): + # matching local user-attributes are "promoted" to a global one. + # (but not when saving split attributes) + input_values = ["global_file1", "same-value", "same-value"] + self.run_roundtrip_testcase( + attr_name="random", + values=input_values, + ) + if do_split: + expected = input_values + else: + expected = ["same-value", None, None] + self.check_roundtrip_results(expected) + + def test_05_userstyle_matching_crossfile_promoted(self, do_split): + # matching user-attributes are promoted, even across input files. + # (but not when saving split attributes) + self.run_roundtrip_testcase( + attr_name="random", + values=[ + ["global_file1", "same-value", "same-value"], + [None, "same-value", "same-value"], + ], + ) + if do_split: + # newstyle saves: locals are preserved, mismathced global is *lost* + expected_result = [ + None, + "same-value", + "same-value", + "same-value", + "same-value", + ] + # warnings about the clash + expected_warnings = [ + "Saving.* global attributes.* as local", + 'attributes.* of cube "var_0" were not saved', + 'attributes.* of cube "var_1" were not saved', + ] + else: + # oldstyle saves: matching locals promoted, override original global + expected_result = ["same-value", None, None, None, None] + expected_warnings = None + + self.check_roundtrip_results(expected_result, expected_warnings) + + def test_06_userstyle_nonmatching_remainlocal(self, do_split): + # Non-matching user attributes remain 'local' to the individual variables. 
+ input_values = ["global_file1", "value-1", "value-2"] + if do_split: + # originals are preserved + expected_result = input_values + else: + # global is lost + expected_result = [None, "value-1", "value-2"] + self.run_roundtrip_testcase(attr_name="random", values=input_values) + self.check_roundtrip_results(expected_result) + + ####################################################### + # Tests on "Conventions" attribute. + # Note: the usual 'Conventions' behaviour is already tested elsewhere + # - see :class:`TestConventionsAttributes` above + # + # TODO: the name 'conventions' (lower-case) is also listed in _CF_GLOBAL_ATTRS, but + # we have excluded it from the global-attrs testing here. We probably still need to + # test what that does, though it's inclusion might simply be a mistake. + # + + def test_07_conventions_var_local(self): + # What happens if 'Conventions' appears as a variable-local attribute. + # N.B. this is not good CF, but we'll see what happens anyway. + self.run_roundtrip_testcase( + attr_name="Conventions", + values=[None, "user_set"], + ) + self.check_roundtrip_results(["CF-1.7", None]) + + def test_08_conventions_var_both(self): + # What happens if 'Conventions' appears as both global + local attribute. + self.run_roundtrip_testcase( + attr_name="Conventions", + values=["global-setting", "local-setting"], + ) + # standard content from Iris save + self.check_roundtrip_results(["CF-1.7", None]) + + ####################################################### + # Tests on "global" style attributes + # = those specific ones which 'ought' only to be global (except on collisions) + # + def test_09_globalstyle__global(self, global_attr): + attr_content = f"Global tracked {global_attr}" + self.run_roundtrip_testcase( + attr_name=global_attr, + values=[attr_content, None], + ) + self.check_roundtrip_results([attr_content, None]) + + def test_10_globalstyle__local(self, global_attr, do_split): + # Strictly, not correct CF, but let's see what it does with it. 
+ attr_content = f"Local tracked {global_attr}" + input_values = [None, attr_content] + self.run_roundtrip_testcase( + attr_name=global_attr, + values=input_values, + ) + if do_split: + # remains local as supplied, but there is a warning + expected_result = input_values + expected_warning = f"'{global_attr}'.* should only be a CF global" + else: + # promoted to global + expected_result = [attr_content, None] + expected_warning = None + self.check_roundtrip_results(expected_result, expected_warning) + + def test_11_globalstyle__both(self, global_attr, do_split): + attr_global = f"Global-{global_attr}" + attr_local = f"Local-{global_attr}" + input_values = [attr_global, attr_local] + self.run_roundtrip_testcase( + attr_name=global_attr, + values=input_values, + ) + if do_split: + # remains local as supplied, but there is a warning + expected_result = input_values + expected_warning = "should only be a CF global" + else: + # promoted to global, no local value, original global lost + expected_result = [attr_local, None] + expected_warning = None + self.check_roundtrip_results(expected_result, expected_warning) + + def test_12_globalstyle__multivar_different(self, global_attr): + # Multiple *different* local settings are retained, not promoted + attr_1 = f"Local-{global_attr}-1" + attr_2 = f"Local-{global_attr}-2" + expect_warning = "should only be a CF global attribute" + # A warning should be raised when writing the result. 
+ self.run_roundtrip_testcase( + attr_name=global_attr, + values=[None, attr_1, attr_2], + ) + self.check_roundtrip_results([None, attr_1, attr_2], expect_warning) + + def test_13_globalstyle__multivar_same(self, global_attr, do_split): + # Multiple *same* local settings are promoted to a common global one + attrval = f"Locally-defined-{global_attr}" + input_values = [None, attrval, attrval] + self.run_roundtrip_testcase( + attr_name=global_attr, + values=input_values, + ) + if do_split: + # remains local, but with a warning + expected_warning = "should only be a CF global" + expected_result = input_values + else: + # promoted to global + expected_warning = None + expected_result = [attrval, None, None] + self.check_roundtrip_results(expected_result, expected_warning) + + def test_14_globalstyle__multifile_different(self, global_attr, do_split): + # Different global attributes from multiple files are retained as local ones + attr_1 = f"Global-{global_attr}-1" + attr_2 = f"Global-{global_attr}-2" + self.run_roundtrip_testcase( + attr_name=global_attr, + values=[[attr_1, None], [attr_2, None]], + ) + # A warning should be raised when writing the result. + expected_warnings = ["should only be a CF global attribute"] + if do_split: + # An extra warning, only when saving with split-attributes. 
+ expected_warnings = ["Saving.* as local"] + expected_warnings + self.check_roundtrip_results([None, attr_1, attr_2], expected_warnings) + + def test_15_globalstyle__multifile_same(self, global_attr): + # Matching global-type attributes in multiple files are retained as global + attrval = f"Global-{global_attr}" + self.run_roundtrip_testcase( + attr_name=global_attr, values=[[attrval, None], [attrval, None]] + ) + self.check_roundtrip_results([attrval, None, None]) + + ####################################################### + # Tests on "local" style attributes + # = those specific ones which 'ought' to appear attached to a variable, rather than + # being global + # + + @pytest.mark.parametrize("origin_style", ["input_global", "input_local"]) + def test_16_localstyle(self, local_attr, origin_style, do_split): + # local-style attributes should *not* get 'promoted' to global ones + # Set the name extension to avoid tests with different 'style' params having + # collisions over identical testfile names + self.testname_extension = origin_style + + attrval = f"Attr-setting-{local_attr}" + if local_attr == "missing_value": + # Special-cases : 'missing_value' type must be compatible with the variable + attrval = 303 + elif local_attr == "ukmo__process_flags": + # What this does when a GLOBAL attr seems to be weird + unintended. + # 'this' --> 't h i s' + attrval = "process" + # NOTE: it's also supposed to handle vector values - which we are not + # testing. 
+ + # NOTE: results *should* be the same whether the original attribute is written + # as global or a variable attribute + if origin_style == "input_global": + # Record in source as a global attribute + values = [attrval, None] + else: + assert origin_style == "input_local" + # Record in source as a variable-local attribute + values = [None, attrval] + self.run_roundtrip_testcase(attr_name=local_attr, values=values) + + if ( + local_attr in ("missing_value", "standard_error_multiplier") + and origin_style == "input_local" + ): + # These ones are actually discarded by roundtrip. + # Not clear why, but for now this captures the facts. + expect_global = None + expect_var = None + else: + expect_global = None + if ( + local_attr == "ukmo__process_flags" + and origin_style == "input_global" + and not do_split + ): + # This is very odd behaviour + surely unintended. + # It's supposed to handle vector values (which we are not checking). + # But the weird behaviour only applies to the 'global' test, which is + # obviously not normal usage anyway. + attrval = "p r o c e s s" + expect_var = attrval + + if local_attr == "STASH" and ( + origin_style == "input_local" or not do_split + ): + # A special case, output translates this to a different attribute name. 
+ self.attrname = "um_stash_source" + + expected_result = [expect_global, expect_var] + if do_split and origin_style == "input_global": + # The result is simply the "other way around" + expected_result = expected_result[::-1] + self.check_roundtrip_results(expected_result) + + @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) + @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) + def test_roundtrip_matrix( + self, testcase, attrname, matrix_results, do_split + ): + do_saves, matrix_results = matrix_results + split_param = "split" if do_split else "unsplit" + testcase_spec = matrix_results["roundtrip"][testcase] + input_spec = testcase_spec["input"] + values = decode_matrix_input(input_spec) + + self.run_roundtrip_testcase(attrname, values) + results = self.fetch_results(filepath=self.result_filepath) + result_spec = encode_matrix_result(results) + + attr_style = deduce_attr_style(attrname) + expected = testcase_spec[attr_style][split_param] + + if do_saves: + testcase_spec[attr_style][split_param] = result_spec + if expected is not None: + assert result_spec == expected + + +class TestLoad(MixinAttrsTesting): + """ + Test loading of file attributes into Iris cube attribute dictionaries. + + Tests loading of various combinations to cube dictionaries, treated as a + single combined result (i.e. not split). This behaviour should be (almost) + conserved with the adoption of split attributes **except possibly for key + orderings** -- i.e. we test only up to dictionary equality. + + NOTE: the tested combinations are identical to the roundtrip test. Test numbering + is kept the same, so some (which are inapplicable for this) are missing. 
+ + """ + + def run_load_testcase(self, attr_name, values): + self.run_testcase( + attr_name=attr_name, values=values, create_cubes_or_files="files" + ) + + def check_load_results(self, expected, oldstyle_combined=False): + if not _SPLIT_SAVE_SUPPORTED and not oldstyle_combined: + # Don't check "newstyle" in the old world -- just skip it. + return + result_cubes = iris.load(self.input_filepaths) + results = self.fetch_results( + cubes=result_cubes, oldstyle_combined=oldstyle_combined + ) + # Standardise expected form to list(lists). + assert isinstance(expected, list) + if not isinstance(expected[0], list): + expected = [expected] + assert results == expected + + ####################################################### + # Tests on "user-style" attributes. + # This means any arbitrary attribute which a user might have added -- i.e. one with + # a name which is *not* recognised in the netCDF or CF conventions. + # + + def test_01_userstyle_single_global(self): + self.run_load_testcase( + attr_name="myname", values=["single_value", None, None] + ) + # Legacy-equivalent result check (single attributes dict per cube) + self.check_load_results( + [None, "single_value", "single_value"], + oldstyle_combined=True, + ) + # Full new-style results check + self.check_load_results(["single_value", None, None]) + + def test_02_userstyle_single_local(self): + # Default behaviour for a general local user-attribute. + # It is attached to only the specific cube. + self.run_load_testcase( + attr_name="myname", # A generic "user" attribute with no special handling + values=[None, "single-value", None], + ) + self.check_load_results( + [None, "single-value", None], oldstyle_combined=True + ) + self.check_load_results([None, "single-value", None]) + + def test_03_userstyle_multiple_different(self): + # Default behaviour for differing local user-attributes. + # The global attribute is simply lost, because there are local ones. 
+ self.run_load_testcase( + attr_name="random", # A generic "user" attribute with no special handling + values=[ + ["global_file1", "f1v1", "f1v2"], + ["global_file2", "x1", "x2"], + ], + ) + self.check_load_results( + [None, "f1v1", "f1v2", "x1", "x2"], + oldstyle_combined=True, + ) + self.check_load_results( + [["global_file1", "f1v1", "f1v2"], ["global_file2", "x1", "x2"]] + ) + + def test_04_userstyle_multiple_same(self): + # Nothing special to note in this case + # TODO: ??remove?? + self.run_load_testcase( + attr_name="random", + values=["global_file1", "same-value", "same-value"], + ) + self.check_load_results( + oldstyle_combined=True, expected=[None, "same-value", "same-value"] + ) + self.check_load_results(["global_file1", "same-value", "same-value"]) + + ####################################################### + # Tests on "Conventions" attribute. + # Note: the usual 'Conventions' behaviour is already tested elsewhere + # - see :class:`TestConventionsAttributes` above + # + # TODO: the name 'conventions' (lower-case) is also listed in _CF_GLOBAL_ATTRS, but + # we have excluded it from the global-attrs testing here. We probably still need to + # test what that does, though it's inclusion might simply be a mistake. + # + + def test_07_conventions_var_local(self): + # What happens if 'Conventions' appears as a variable-local attribute. + # N.B. this is not good CF, but we'll see what happens anyway. + self.run_load_testcase( + attr_name="Conventions", + values=[None, "user_set"], + ) + # Legacy result + self.check_load_results([None, "user_set"], oldstyle_combined=True) + # Newstyle result + self.check_load_results([None, "user_set"]) + + def test_08_conventions_var_both(self): + # What happens if 'Conventions' appears as both global + local attribute. + self.run_load_testcase( + attr_name="Conventions", + values=["global-setting", "local-setting"], + ) + # (#1): legacy result : the global version gets lost. 
+ self.check_load_results( + [None, "local-setting"], oldstyle_combined=True + ) + # (#2): newstyle results : retain both. + self.check_load_results(["global-setting", "local-setting"]) + + ####################################################### + # Tests on "global" style attributes + # = those specific ones which 'ought' only to be global (except on collisions) + # + + def test_09_globalstyle__global(self, global_attr): + attr_content = f"Global tracked {global_attr}" + self.run_load_testcase( + attr_name=global_attr, values=[attr_content, None] + ) + # (#1) legacy + self.check_load_results([None, attr_content], oldstyle_combined=True) + # (#2) newstyle : global status preserved. + self.check_load_results([attr_content, None]) + + def test_10_globalstyle__local(self, global_attr): + # Strictly, not correct CF, but let's see what it does with it. + attr_content = f"Local tracked {global_attr}" + self.run_load_testcase( + attr_name=global_attr, + values=[None, attr_content], + ) + # (#1): legacy result = treated the same as a global setting + self.check_load_results([None, attr_content], oldstyle_combined=True) + # (#2): newstyle result : remains local + self.check_load_results( + [None, attr_content], + ) + + def test_11_globalstyle__both(self, global_attr): + attr_global = f"Global-{global_attr}" + attr_local = f"Local-{global_attr}" + self.run_load_testcase( + attr_name=global_attr, + values=[attr_global, attr_local], + ) + # (#1) legacy result : promoted local setting "wins" + self.check_load_results([None, attr_local], oldstyle_combined=True) + # (#2) newstyle result : both retained + self.check_load_results([attr_global, attr_local]) + + def test_12_globalstyle__multivar_different(self, global_attr): + # Multiple *different* local settings are retained + attr_1 = f"Local-{global_attr}-1" + attr_2 = f"Local-{global_attr}-2" + self.run_load_testcase( + attr_name=global_attr, + values=[None, attr_1, attr_2], + ) + # (#1): legacy values, for cube.attributes 
viewed as a single dict + self.check_load_results([None, attr_1, attr_2], oldstyle_combined=True) + # (#2): exact results, with newstyle "split" cube attrs + self.check_load_results([None, attr_1, attr_2]) + + def test_14_globalstyle__multifile_different(self, global_attr): + # Different global attributes from multiple files + attr_1 = f"Global-{global_attr}-1" + attr_2 = f"Global-{global_attr}-2" + self.run_load_testcase( + attr_name=global_attr, + values=[[attr_1, None, None], [attr_2, None, None]], + ) + # (#1) legacy : multiple globals retained as local ones + self.check_load_results( + [None, attr_1, attr_1, attr_2, attr_2], oldstyle_combined=True + ) + # (#1) newstyle : result same as input + self.check_load_results([[attr_1, None, None], [attr_2, None, None]]) + + ####################################################### + # Tests on "local" style attributes + # = those specific ones which 'ought' to appear attached to a variable, rather than + # being global + # + + @pytest.mark.parametrize("origin_style", ["input_global", "input_local"]) + def test_16_localstyle(self, local_attr, origin_style): + # local-style attributes should *not* get 'promoted' to global ones + # Set the name extension to avoid tests with different 'style' params having + # collisions over identical testfile names + self.testname_extension = origin_style + + attrval = f"Attr-setting-{local_attr}" + if local_attr == "missing_value": + # Special-case : 'missing_value' type must be compatible with the variable + attrval = 303 + elif local_attr == "ukmo__process_flags": + # Another special case : the handling of this one is "unusual". + attrval = "process" + + # Create testfiles and load them, which should always produce a single cube. 
+ if origin_style == "input_global": + # Record in source as a global attribute + values = [attrval, None] + else: + assert origin_style == "input_local" + # Record in source as a variable-local attribute + values = [None, attrval] + + self.run_load_testcase(attr_name=local_attr, values=values) + + # Work out the expected result. + result_value = attrval + # ... there are some special cases + if origin_style == "input_local": + if local_attr == "ukmo__process_flags": + # Some odd special behaviour here. + result_value = (result_value,) + elif local_attr in ("standard_error_multiplier", "missing_value"): + # For some reason, these ones never appear on the cube + result_value = None + + # NOTE: **legacy** result is the same, whether the original attribute was + # provided as a global or local attribute ... + expected_result_legacy = [None, result_value] + + # While 'newstyle' results preserve the input type local/global. + if origin_style == "input_local": + expected_result_newstyle = [None, result_value] + else: + expected_result_newstyle = [result_value, None] + + # (#1): legacy values, for cube.attributes viewed as a single dict + self.check_load_results(expected_result_legacy, oldstyle_combined=True) + # (#2): exact results, with newstyle "split" cube attrs + self.check_load_results(expected_result_newstyle) + + @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) + @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) + @pytest.mark.parametrize("resultstyle", _MATRIX_LOAD_RESULTSTYLES) + def test_load_matrix( + self, testcase, attrname, matrix_results, resultstyle + ): + do_saves, matrix_results = matrix_results + testcase_spec = matrix_results["load"][testcase] + input_spec = testcase_spec["input"] + values = decode_matrix_input(input_spec) + + self.run_load_testcase(attrname, values) + + result_cubes = iris.load(self.input_filepaths) + do_combined = resultstyle == "legacy" + results = self.fetch_results( + cubes=result_cubes, 
oldstyle_combined=do_combined + ) + result_spec = encode_matrix_result(results) + + attr_style = deduce_attr_style(attrname) + expected = testcase_spec[attr_style][resultstyle] + + if do_saves: + testcase_spec[attr_style][resultstyle] = result_spec + if expected is not None: + assert result_spec == expected + + +class TestSave(MixinAttrsTesting): + """ + Test saving from cube attributes dictionary (various categories) into files. + + """ + + # Parametrise all tests over split/unsplit saving. + @pytest.fixture( + params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True + ) + def do_split(self, request): + do_split = request.param + self.save_split_attrs = do_split + return do_split + + def run_save_testcase(self, attr_name: str, values: list): + # Create input cubes. + self.run_testcase( + attr_name=attr_name, + values=values, + create_cubes_or_files="cubes", + ) + + # Save input cubes to a temporary result file. + with warnings.catch_warnings(record=True) as captured_warnings: + self.result_filepath = self._testfile_path("result") + do_split = getattr(self, "save_split_attrs", False) + kwargs = ( + dict(save_split_attrs=do_split) + if _SPLIT_SAVE_SUPPORTED + else dict() + ) + with iris.FUTURE.context(**kwargs): + iris.save(self.input_cubes, self.result_filepath) + + self.captured_warnings = captured_warnings + + def run_save_testcase_legacytype(self, attr_name: str, values: list): + """ + Legacy-type means : before cubes had split attributes. + + This just means we have only one "set" of cubes, with ***no*** distinct global + attribute. + """ + if not isinstance(values, list): + # Translate single input value to list-of-1 + values = [values] + + self.run_save_testcase(attr_name, [None] + values) + + def check_save_results( + self, expected: list, expected_warnings: List[str] = None + ): + results = self.fetch_results(filepath=self.result_filepath) + assert results == expected + check_captured_warnings( + expected_warnings, + self.captured_warnings, + # N.B. 
only allow a legacy-attributes warning when NOT saving split-attrs + allow_possible_legacy_warning=not self.save_split_attrs, + ) + + def test_userstyle__single(self, do_split): + self.run_save_testcase_legacytype("random", "value-x") + if do_split: + # result as input values + expected_result = [None, "value-x"] + else: + # in legacy mode, promoted = stored as a *global* by default. + expected_result = ["value-x", None] + self.check_save_results(expected_result) + + def test_userstyle__multiple_same(self, do_split): + self.run_save_testcase_legacytype("random", ["value-x", "value-x"]) + if do_split: + # result as input values + expected_result = [None, "value-x", "value-x"] + else: + # in legacy mode, promoted = stored as a *global* by default. + expected_result = ["value-x", None, None] + self.check_save_results(expected_result) + + def test_userstyle__multiple_different(self): + # Clashing values are stored as locals on the individual variables. + self.run_save_testcase_legacytype("random", ["value-A", "value-B"]) + self.check_save_results([None, "value-A", "value-B"]) + + def test_userstyle__multiple_onemissing(self): + # Multiple user-type, with one missing, behave like different values. + self.run_save_testcase_legacytype( + "random", + ["value", None], + ) + # Stored as locals when there are differing values. + self.check_save_results([None, "value", None]) + + def test_Conventions__single(self): + self.run_save_testcase_legacytype("Conventions", "x") + # Always discarded + replaced by a single global setting. + self.check_save_results(["CF-1.7", None]) + + def test_Conventions__multiple_same(self): + self.run_save_testcase_legacytype( + "Conventions", ["same-value", "same-value"] + ) + # Always discarded + replaced by a single global setting. 
+ self.check_save_results(["CF-1.7", None, None]) + + def test_Conventions__multiple_different(self): + self.run_save_testcase_legacytype( + "Conventions", ["value-A", "value-B"] + ) + # Always discarded + replaced by a single global setting. + self.check_save_results(["CF-1.7", None, None]) + + def test_globalstyle__single(self, global_attr, do_split): + self.run_save_testcase_legacytype(global_attr, ["value"]) + if do_split: + # result as input values + expected_warning = "should only be a CF global" + expected_result = [None, "value"] + else: + # in legacy mode, promoted + expected_warning = None + expected_result = ["value", None] + self.check_save_results(expected_result, expected_warning) + + def test_globalstyle__multiple_same(self, global_attr, do_split): + # Multiple global-type with same values are made global. + self.run_save_testcase_legacytype( + global_attr, + ["value-same", "value-same"], + ) + if do_split: + # result as input values + expected_result = [None, "value-same", "value-same"] + expected_warning = "should only be a CF global attribute" + else: + # in legacy mode, promoted + expected_result = ["value-same", None, None] + expected_warning = None + self.check_save_results(expected_result, expected_warning) + + def test_globalstyle__multiple_different(self, global_attr): + # Multiple global-type with different values become local, with warning. + self.run_save_testcase_legacytype(global_attr, ["value-A", "value-B"]) + # *Only* stored as locals when there are differing values. + msg_regexp = ( + f"'{global_attr}' is being added as CF data variable attribute," + f".* should only be a CF global attribute." + ) + self.check_save_results( + [None, "value-A", "value-B"], expected_warnings=msg_regexp + ) + + def test_globalstyle__multiple_onemissing(self, global_attr): + # Multiple global-type, with one missing, behave like different values. 
+ self.run_save_testcase_legacytype( + global_attr, ["value", "value", None] + ) + # Stored as locals when there are differing values. + msg_regexp = ( + f"'{global_attr}' is being added as CF data variable attribute," + f".* should only be a CF global attribute." + ) + self.check_save_results( + [None, "value", "value", None], expected_warnings=msg_regexp + ) + + def test_localstyle__single(self, local_attr): + self.run_save_testcase_legacytype(local_attr, ["value"]) + + # Defaults to local + expected_results = [None, "value"] + # .. but a couple of special cases + if local_attr == "ukmo__process_flags": + # A particular, really weird case + expected_results = [None, "v a l u e"] + elif local_attr == "STASH": + # A special case : the stored name is different + self.attrname = "um_stash_source" + + self.check_save_results(expected_results) + + def test_localstyle__multiple_same(self, local_attr): + self.run_save_testcase_legacytype( + local_attr, ["value-same", "value-same"] + ) + + # They remain separate + local + expected_results = [None, "value-same", "value-same"] + if local_attr == "ukmo__process_flags": + # A particular, really weird case + expected_results = [ + None, + "v a l u e - s a m e", + "v a l u e - s a m e", + ] + elif local_attr == "STASH": + # A special case : the stored name is different + self.attrname = "um_stash_source" + + self.check_save_results(expected_results) + + def test_localstyle__multiple_different(self, local_attr): + self.run_save_testcase_legacytype(local_attr, ["value-A", "value-B"]) + # Different values are treated just the same as matching ones. 
+ expected_results = [None, "value-A", "value-B"] + if local_attr == "ukmo__process_flags": + # A particular, really weird case + expected_results = [ + None, + "v a l u e - A", + "v a l u e - B", + ] + elif local_attr == "STASH": + # A special case : the stored name is different + self.attrname = "um_stash_source" + self.check_save_results(expected_results) + + # + # Test handling of newstyle independent global+local cube attributes. + # + def test_globallocal_clashing(self, do_split): + # A cube has clashing local + global attrs. + original_values = ["valueA", "valueB"] + self.run_save_testcase("userattr", original_values) + expected_result = original_values.copy() + if not do_split: + # in legacy mode, "promote" = lose the local one + expected_result[0] = expected_result[1] + expected_result[1] = None + self.check_save_results(expected_result) + + def test_globallocal_oneeach_same(self, do_split): + # One cube with global attr, another with identical local one. + self.run_save_testcase( + "userattr", values=[[None, "value"], ["value", None]] + ) + if do_split: + expected = [None, "value", "value"] + expected_warning = ( + r"Saving the cube global attributes \['userattr'\] as local" + ) + else: + # N.B. legacy code sees only two equal values (and promotes). + expected = ["value", None, None] + expected_warning = None + + self.check_save_results(expected, expected_warning) + + def test_globallocal_oneeach_different(self, do_split): + # One cube with global attr, another with a *different* local one. + self.run_save_testcase( + "userattr", [[None, "valueA"], ["valueB", None]] + ) + if do_split: + warning = ( + r"Saving the cube global attributes \['userattr'\] as local" + ) + else: + # N.B. legacy code does not warn of global-to-local "demotion". + warning = None + self.check_save_results([None, "valueA", "valueB"], warning) + + def test_globallocal_one_other_clashingglobals(self, do_split): + # Two cubes with both, second cube has a clashing global attribute. 
+ self.run_save_testcase( + "userattr", + values=[["valueA", "valueB"], ["valueXXX", "valueB"]], + ) + if do_split: + expected = [None, "valueB", "valueB"] + expected_warnings = [ + "Saving.* global attributes.* as local", + 'attributes.* of cube "v1" were not saved', + 'attributes.* of cube "v2" were not saved', + ] + else: + # N.B. legacy code sees only the locals, and promotes them. + expected = ["valueB", None, None] + expected_warnings = None + self.check_save_results(expected, expected_warnings) + + def test_globallocal_one_other_clashinglocals(self, do_split): + # Two cubes with both, second cube has a clashing local attribute. + inputs = [["valueA", "valueB"], ["valueA", "valueXXX"]] + if do_split: + expected = ["valueA", "valueB", "valueXXX"] + else: + # N.B. legacy code sees only the locals. + expected = [None, "valueB", "valueXXX"] + self.run_save_testcase("userattr", values=inputs) + self.check_save_results(expected) + + @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) + @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) + def test_save_matrix(self, testcase, attrname, matrix_results, do_split): + do_saves, matrix_results = matrix_results + split_param = "split" if do_split else "unsplit" + testcase_spec = matrix_results["save"][testcase] + input_spec = testcase_spec["input"] + values = decode_matrix_input(input_spec) + + self.run_save_testcase(attrname, values) + results = self.fetch_results(filepath=self.result_filepath) + result_spec = encode_matrix_result(results) + + attr_style = deduce_attr_style(attrname) + expected = testcase_spec[attr_style][split_param] + + if do_saves: + testcase_spec[attr_style][split_param] = result_spec + if expected is not None: + assert result_spec == expected diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index 054fd3a20b..7c11fde55d 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -21,6 +21,7 @@ from iris._lazy_data import 
as_lazy_data from iris.coords import AuxCoord, DimCoord import iris.cube +from iris.cube import CubeAttrsDict import iris.exceptions import iris.tests.stock @@ -1107,5 +1108,86 @@ def test_ancillary_variable_error_msg(self): _ = iris.cube.CubeList([cube1, cube2]).merge_cube() +class TestCubeMerge__split_attributes__error_messages(tests.IrisTest): + """ + Specific tests for the detection and wording of attribute-mismatch errors. + + In particular, the adoption of 'split' attributes with the new + :class:`iris.cube.CubeAttrsDict` introduces some more subtle possible discrepancies + in attributes, where this has also impacted the messaging, so this aims to probe + those cases. + """ + + def _check_merge_error(self, attrs_1, attrs_2, expected_message): + """ + Check the error from a merge failure caused by a mismatch of attributes. + + Build a pair of cubes with given attributes, merge them + check for a match + to the expected error message. + """ + cube_1 = iris.cube.Cube( + [0], + aux_coords_and_dims=[(AuxCoord([1], long_name="x"), None)], + attributes=attrs_1, + ) + cube_2 = iris.cube.Cube( + [0], + aux_coords_and_dims=[(AuxCoord([2], long_name="x"), None)], + attributes=attrs_2, + ) + with self.assertRaisesRegex( + iris.exceptions.MergeError, expected_message + ): + iris.cube.CubeList([cube_1, cube_2]).merge_cube() + + def test_keys_differ__single(self): + self._check_merge_error( + attrs_1=dict(a=1, b=2), + attrs_2=dict(a=1), + # Note: matching key 'a' does *not* appear in the message + expected_message="cube.attributes keys differ: 'b'", + ) + + def test_keys_differ__multiple(self): + self._check_merge_error( + attrs_1=dict(a=1, b=2), + attrs_2=dict(a=1, c=2), + expected_message="cube.attributes keys differ: 'b', 'c'", + ) + + def test_values_differ__single(self): + self._check_merge_error( + attrs_1=dict(a=1, b=2), # Note: matching key 'a' does not appear + attrs_2=dict(a=1, b=3), + expected_message="cube.attributes values differ for keys: 'b'", + ) + + def 
test_values_differ__multiple(self): + self._check_merge_error( + attrs_1=dict(a=1, b=2), + attrs_2=dict(a=12, b=22), + expected_message="cube.attributes values differ for keys: 'a', 'b'", + ) + + def test_splitattrs_keys_local_global_mismatch(self): + # Since Cube.attributes is now a "split-attributes" dictionary, it is now + # possible to have "cube1.attributes != cube1.attributes", but also + # "set(cube1.attributes.keys()) == set(cube2.attributes.keys())". + # I.E. it is now necessary to specifically compare ".globals" and ".locals" to + # see *what* differs between two attributes dictionaries. + self._check_merge_error( + attrs_1=CubeAttrsDict(globals=dict(a=1), locals=dict(b=2)), + attrs_2=CubeAttrsDict(locals=dict(a=2)), + expected_message="cube.attributes keys differ: 'a', 'b'", + ) + + def test_splitattrs_keys_local_match_masks_global_mismatch(self): + self._check_merge_error( + attrs_1=CubeAttrsDict(globals=dict(a=1), locals=dict(a=3)), + attrs_2=CubeAttrsDict(globals=dict(a=2), locals=dict(a=3)), + expected_message="cube.attributes values differ for keys: 'a'", + ) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 382607dca5..4425ba62d7 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -15,8 +15,11 @@ import unittest.mock as mock from unittest.mock import sentinel +import pytest + from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import BaseMetadata, CubeMetadata +from iris.cube import CubeAttrsDict def _make_metadata( @@ -90,9 +93,360 @@ def test_bases(self): self.assertTrue(issubclass(self.cls, BaseMetadata)) -class Test___eq__(tests.IrisTest): - def setUp(self): - self.values = dict( +@pytest.fixture(params=CubeMetadata._fields) +def fieldname(request): + """Parametrize testing over all CubeMetadata field 
names.""" + return request.param + + +@pytest.fixture(params=["strict", "lenient"]) +def op_leniency(request): + """Parametrize testing over strict or lenient operation.""" + return request.param + + +@pytest.fixture(params=["primaryAA", "primaryAX", "primaryAB"]) +def primary_values(request): + """ + Parametrize over the possible non-trivial pairs of operation values. + + The parameters all provide two attribute values which are the left- and right-hand + arguments to the tested operation. The attribute values are single characters from + the end of the parameter name -- except that "X" denotes a "missing" attribute. + + The possible cases are: + + * one side has a value and the other is missing + * left and right have the same non-missing value + * left and right have different non-missing values + """ + return request.param + + +@pytest.fixture(params=[False, True], ids=["primaryLocal", "primaryGlobal"]) +def primary_is_global_not_local(request): + """Parametrize split-attribute testing over "global" or "local" attribute types.""" + return request.param + + +@pytest.fixture(params=[False, True], ids=["leftrightL2R", "leftrightR2L"]) +def order_reversed(request): + """Parametrize split-attribute testing over "left OP right" or "right OP left".""" + return request.param + + +# Define the expected results for split-attribute testing. +# This dictionary records the expected results for the various possible arrangements of +# values of a single attribute in the "left" and "right" inputs of a CubeMetadata +# operation. +# The possible operations are "equal", "combine" or "difference", and may all be +# performed "strict" or "lenient". +# N.B. the *same* results should also apply when left+right are swapped, with a suitable +# adjustment to the result value. Likewise, results should be the same for either +# global- or local-style attributes. 
+_ALL_RESULTS = { + "equal": { + "primaryAA": {"lenient": True, "strict": True}, + "primaryAX": {"lenient": True, "strict": False}, + "primaryAB": {"lenient": False, "strict": False}, + }, + "combine": { + "primaryAA": {"lenient": "A", "strict": "A"}, + "primaryAX": {"lenient": "A", "strict": None}, + "primaryAB": {"lenient": None, "strict": None}, + }, + "difference": { + "primaryAA": {"lenient": None, "strict": None}, + "primaryAX": {"lenient": None, "strict": ("A", None)}, + "primaryAB": {"lenient": ("A", "B"), "strict": ("A", "B")}, + }, +} +# A fixed attribute name used for all the split-attribute testing. +_TEST_ATTRNAME = "_test_attr_" + + +def extract_attribute_value(split_dict, extract_global): + """ + Extract a test-attribute value from a split-attribute dictionary. + + Parameters + ---------- + split_dict : CubeAttrsDict + a split dictionary from an operation result + extract_global : bool + whether to extract values of the global, or local, `_TEST_ATTRNAME` attribute + + Returns + ------- + str | None + """ + if extract_global: + result = split_dict.globals.get(_TEST_ATTRNAME, None) + else: + result = split_dict.locals.get(_TEST_ATTRNAME, None) + return result + + +def extract_result_value(input, extract_global): + """ + Extract the values(s) of the main test attribute from an operation result. + + Parameters + ---------- + input : bool | CubeMetadata + an operation result : the structure varies for the three different operations. + extract_global : bool + whether to return values of a global, or local, `_TEST_ATTRNAME` attribute. + + Returns + ------- + None | bool | str | tuple[None | str] + result value(s) + """ + if not isinstance(input, CubeMetadata): + # Result is either boolean (for "equals") or a None (for "difference"). + result = input + else: + # Result is a CubeMetadata. Get the value(s) of the required attribute. 
+ result = input.attributes + + if isinstance(result, CubeAttrsDict): + result = extract_attribute_value(result, extract_global) + else: + # For "difference", input.attributes is a *pair* of dictionaries. + assert isinstance(result, tuple) + result = tuple( + [ + extract_attribute_value(dic, extract_global) + for dic in result + ] + ) + if result == (None, None): + # This value occurs when the desired attribute is *missing* from a + # difference result, but other (secondary) attributes were *different*. + # We want only differences of the *target* attribute, so convert these + # to a plain 'no difference', for expected-result testing purposes. + result = None + + return result + + +def make_attrsdict(value): + """ + Return a dictionary containing a test attribute with the given value. + + If the value is "X", the attribute is absent (result is empty dict). + """ + if value == "X": + # Translate an "X" input as "missing". + result = {} + else: + result = {_TEST_ATTRNAME: value} + return result + + +def check_splitattrs_testcase( + operation_name: str, + check_is_lenient: bool, + primary_inputs: str = "AA", # two character values + secondary_inputs: str = "XX", # two character values + check_global_not_local: bool = True, + check_reversed: bool = False, +): + """ + Test a metadata operation with split-attributes against known expected results. + + Parameters + ---------- + operation_name : str + One of "equal", "combine" or "difference. + check_is_lenient : bool + Whether the tested operation is performed 'lenient' or 'strict'. + primary_inputs : str + A pair of characters defining left + right attribute values for the operands of + the operation. + secondary_inputs : str + A further pair of values for an attribute of the same name but "other" type + ( i.e. global/local when the main test is local/global ). + check_global_not_local : bool + If `True` then the primary operands, and the tested result values, are *global* + attributes, and the secondary ones are local. 
+ Otherwise, the other way around. + check_reversed : bool + If True, the left and right operands are exchanged, and the expected value + modified according. + + Notes + ----- + The expected result of an operation is mostly defined by : the operation applied; + the main "primary" inputs; and the lenient/strict mode. + + In the case of the "equals" operation, however, the expected result is simply + set to `False` if the secondary inputs do not match. + + Calling with different values for the keywords aims to show that the main operation + has the expected value, from _ALL_RESULTS, the ***same in essentially all cases*** + ( though modified in specific ways for some factors ). + + This regularity also demonstrates the required independence over the other + test-factors, i.e. global/local attribute type, and right-left order. + """ + # Just for comfort, check that inputs are all one of a few single characters. + assert all( + (item in list("ABCDX")) for item in (primary_inputs + secondary_inputs) + ) + # Interpret "primary" and "secondary" inputs as "global" and "local" attributes. + if check_global_not_local: + global_values, local_values = primary_inputs, secondary_inputs + else: + local_values, global_values = primary_inputs, secondary_inputs + + # Form 2 inputs to the operation : Make left+right split-attribute input + # dictionaries, with both the primary and secondary attribute value settings. + input_dicts = [ + CubeAttrsDict( + globals=make_attrsdict(global_value), + locals=make_attrsdict(local_value), + ) + for global_value, local_value in zip(global_values, local_values) + ] + # Make left+right CubeMetadata with those attributes, other fields all blank. + input_l, input_r = [ + CubeMetadata( + **{ + field: attrs if field == "attributes" else None + for field in CubeMetadata._fields + } + ) + for attrs in input_dicts + ] + + if check_reversed: + # Swap the inputs to perform a 'reversed' calculation. 
+ input_l, input_r = input_r, input_l + + # Run the actual operation + result = getattr(input_l, operation_name)( + input_r, lenient=check_is_lenient + ) + + if operation_name == "difference" and check_reversed: + # Adjust the result of a "reversed" operation to the 'normal' way round. + # ( N.B. only "difference" results are affected by reversal. ) + if isinstance(result, CubeMetadata): + result = result._replace(attributes=result.attributes[::-1]) + + # Extract, from the operation result, the value to be tested against "expected". + result = extract_result_value(result, check_global_not_local) + + # Get the *expected* result for this operation. + which = "lenient" if check_is_lenient else "strict" + primary_key = "primary" + primary_inputs + expected = _ALL_RESULTS[operation_name][primary_key][which] + if operation_name == "equal" and expected: + # Account for the equality cases made `False` by mismatched secondary values. + left, right = secondary_inputs + secondaries_same = left == right or ( + check_is_lenient and "X" in (left, right) + ) + if not secondaries_same: + expected = False + + # Check that actual extracted operation result matches the "expected" one. + assert result == expected + + +class MixinSplitattrsMatrixTests: + """ + Define split-attributes tests to perform on all the metadata operations. + + This is inherited by the testclass for each operation : + i.e. Test___eq__, Test_combine and Test_difference + """ + + # Define the operation name : set in each inheritor + operation_name = None + + def test_splitattrs_cases( + self, + op_leniency, + primary_values, + primary_is_global_not_local, + order_reversed, + ): + """ + Check the basic operation against the expected result from _ALL_RESULTS. + + Parametrisation checks this for all combinations of various factors : + + * possible arrangements of the primary values + * strict and lenient + * global- and local-type attributes + * left-to-right or right-to-left operation order. 
+ """ + primary_inputs = primary_values[-2:] + check_is_lenient = {"strict": False, "lenient": True}[op_leniency] + check_splitattrs_testcase( + operation_name=self.operation_name, + check_is_lenient=check_is_lenient, + primary_inputs=primary_inputs, + secondary_inputs="XX", + check_global_not_local=primary_is_global_not_local, + check_reversed=order_reversed, + ) + + @pytest.mark.parametrize( + "secondary_values", + [ + "secondaryXX", + "secondaryCX", + "secondaryXC", + "secondaryCC", + "secondaryCD", + ] + # NOTE: test CX as well as XC, since primary choices has "AX" but not "XA". + ) + def test_splitattrs_global_local_independence( + self, + op_leniency, + primary_values, + secondary_values, + ): + """ + Check that results are (mostly) independent of the "other" type attributes. + + The operation on attributes of the 'primary' type (global/local) should be + basically unaffected by those of the 'secondary' type (--> local/global). + + This is not really true for equality, so we adjust those results to compensate. + See :func:`check_splitattrs_testcase` for explanations. + + Notes + ----- + We provide this *separate* test for global/local attribute independence, + parametrized over selected relevant arrangements of the 'secondary' values. + We *don't* test with reversed order or "local" primary inputs, because matrix + testing over *all* relevant factors produces too many possible combinations. 
+ """ + primary_inputs = primary_values[-2:] + secondary_inputs = secondary_values[-2:] + check_is_lenient = {"strict": False, "lenient": True}[op_leniency] + check_splitattrs_testcase( + operation_name=self.operation_name, + check_is_lenient=check_is_lenient, + primary_inputs=primary_inputs, + secondary_inputs=secondary_inputs, + check_global_not_local=True, + check_reversed=False, + ) + + +class Test___eq__(MixinSplitattrsMatrixTests): + operation_name = "equal" + + @pytest.fixture(autouse=True) + def setup(self): + self.lvalues = dict( standard_name=sentinel.standard_name, long_name=sentinel.long_name, var_name=sentinel.var_name, @@ -101,17 +455,19 @@ def setUp(self): attributes=dict(), cell_methods=sentinel.cell_methods, ) + # Setup another values tuple with all-distinct content objects. + self.rvalues = deepcopy(self.lvalues) self.dummy = sentinel.dummy self.cls = CubeMetadata def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + assert self.cls.__eq__.__doc__ == BaseMetadata.__eq__.__doc__ def test_lenient_service(self): qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) + assert qualname___eq__ in _LENIENT + assert _LENIENT[qualname___eq__] + assert _LENIENT[self.cls.__eq__] def test_call(self): other = sentinel.other @@ -122,107 +478,114 @@ def test_call(self): ) as mocker: result = metadata.__eq__(other) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def 
test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) 
- - def test_op_strict_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + assert return_value == result + assert mocker.call_args_list == [mock.call(other)] + + def test_op_same(self, op_leniency): + # Check op all-same content, but all-new data. + # NOTE: test for both strict/lenient, should both work the same. + is_lenient = op_leniency == "lenient" + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) + + def test_op_different__none(self, fieldname, op_leniency): + # One side has field=value, and the other field=None, both strict + lenient. + if fieldname == "attributes": + # Must be a dict, cannot be None. 
+ pytest.skip() + else: + is_lenient = op_leniency == "lenient" + lmetadata = self.cls(**self.lvalues) + self.rvalues.update({fieldname: None}) + rmetadata = self.cls(**self.rvalues) + if fieldname in ("cell_methods", "standard_name", "units"): + # These ones are compared strictly + expect_success = False + elif fieldname in ("var_name", "long_name"): + # For other 'normal' fields : lenient succeeds, strict does not. + expect_success = is_lenient + else: + # Ensure we are handling all the different field cases + raise ValueError( + f"{self.__name__} unhandled fieldname : {fieldname}" + ) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) == expect_success + assert rmetadata.__eq__(lmetadata) == expect_success + + def test_op_different__value(self, fieldname, op_leniency): + # Compare when a given field value is changed, both strict + lenient. + if fieldname == "attributes": + # Dicts have more possibilities: handled separately. + pytest.skip() + else: + is_lenient = op_leniency == "lenient" + lmetadata = self.cls(**self.lvalues) + self.rvalues.update({fieldname: self.dummy}) + rmetadata = self.cls(**self.rvalues) + if fieldname in ( + "cell_methods", + "standard_name", + "units", + "long_name", + ): + # These ones are compared strictly + expect_success = False + elif fieldname == "var_name": + # For other 'normal' fields : lenient succeeds, strict does not. + expect_success = is_lenient + else: + # Ensure we are handling all the different field cases + raise ValueError( + f"{self.__name__} unhandled fieldname : {fieldname}" + ) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check equality both l==r and r==l. 
+ assert lmetadata.__eq__(rmetadata) == expect_success + assert rmetadata.__eq__(lmetadata) == expect_success + + def test_op_different__attribute_extra(self, op_leniency): + # Check when one set of attributes has an extra entry. + is_lenient = op_leniency == "lenient" + lmetadata = self.cls(**self.lvalues) + self.rvalues["attributes"]["_extra_"] = 1 + rmetadata = self.cls(**self.rvalues) + # This counts as equal *only* in the lenient case. + expect_success = is_lenient + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) == expect_success + assert rmetadata.__eq__(lmetadata) == expect_success + + def test_op_different__attribute_value(self, op_leniency): + # lhs and rhs have different values for an attribute, both strict + lenient. + is_lenient = op_leniency == "lenient" + self.lvalues["attributes"]["_extra_"] = mock.sentinel.value1 + self.rvalues["attributes"]["_extra_"] = mock.sentinel.value2 + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # This should ALWAYS fail. + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) class Test___lt__(tests.IrisTest): @@ -256,9 +619,12 @@ def test__ignore_attributes_cell_methods(self): self.assertFalse(result) -class Test_combine(tests.IrisTest): - def setUp(self): - self.values = dict( +class Test_combine(MixinSplitattrsMatrixTests): + operation_name = "combine" + + @pytest.fixture(autouse=True) + def setup(self): + self.lvalues = dict( standard_name=sentinel.standard_name, long_name=sentinel.long_name, var_name=sentinel.var_name, @@ -266,20 +632,20 @@ def setUp(self): attributes=sentinel.attributes, cell_methods=sentinel.cell_methods, ) + # Get a second copy with all-new objects. 
+ self.rvalues = deepcopy(self.lvalues) self.dummy = sentinel.dummy self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + assert self.cls.combine.__doc__ == BaseMetadata.combine.__doc__ def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) + assert qualname_combine in _LENIENT + assert _LENIENT[qualname_combine] + assert _LENIENT[self.cls.combine] def test_lenient_default(self): other = sentinel.other @@ -289,11 +655,8 @@ def test_lenient_default(self): ) as mocker: result = self.none.combine(other) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) + assert return_value == result + assert mocker.call_args_list == [mock.call(other, lenient=None)] def test_lenient(self): other = sentinel.other @@ -304,149 +667,165 @@ def test_lenient(self): ) as mocker: result = self.none.combine(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert return_value == result + assert mocker.call_args_list == [mock.call(other, lenient=lenient)] + + def test_op_same(self, op_leniency): + # Result is same as either input, both strict + lenient. 
+ is_lenient = op_leniency == "lenient" + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + expected = self.lvalues + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_different__none(self, fieldname, op_leniency): + # One side has field=value, and the other field=None, both strict + lenient. + if fieldname == "attributes": + # Can't be None : Tested separately + pytest.skip() + + is_lenient = op_leniency == "lenient" + + lmetadata = self.cls(**self.lvalues) + # Cancel one setting in the rhs argument. + self.rvalues[fieldname] = None + rmetadata = self.cls(**self.rvalues) + + if fieldname in ("cell_methods", "units"): + # NB cell-methods and units *always* strict behaviour. + # strict form : take only those which both have set + strict_result = True + elif fieldname in ("standard_name", "long_name", "var_name"): + strict_result = not is_lenient + else: + # Ensure we are handling all the different field cases + raise ValueError( + f"{self.__name__} unhandled fieldname : {fieldname}" + ) - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def 
test_op_lenient_same_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["cell_methods"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - 
self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["cell_methods"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["cell_methods"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( + if strict_result: + # include only those which both have + expected = self.rvalues + else: + # also include those which only 1 has + expected = self.lvalues + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def 
test_op_different__value(self, fieldname, op_leniency): + # One field has different value for lhs/rhs, both strict + lenient. + if fieldname == "attributes": + # Attribute behaviours are tested separately + pytest.skip() + + is_lenient = op_leniency == "lenient" + + self.lvalues[fieldname] = mock.sentinel.value1 + self.rvalues[fieldname] = mock.sentinel.value2 + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + + # In all cases, this field should be None in the result : leniency has no effect + expected = self.lvalues.copy() + expected[fieldname] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_different__attribute_extra(self, op_leniency): + # One field has an extra attribute, both strict + lenient. + is_lenient = op_leniency == "lenient" + + self.lvalues["attributes"] = {"_a_common_": mock.sentinel.dummy} + self.rvalues["attributes"] = self.lvalues["attributes"].copy() + self.rvalues["attributes"]["_extra_"] = mock.sentinel.testvalue + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + + if is_lenient: + # the extra attribute should appear in the result .. + expected = self.rvalues + else: + # .. it should not + expected = self.lvalues + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_different__attribute_value(self, op_leniency): + # lhs and rhs have different values for an attribute, both strict + lenient. 
+ is_lenient = op_leniency == "lenient" + + self.lvalues["attributes"] = { + "_a_common_": self.dummy, + "_b_common_": mock.sentinel.value1, + } + self.lvalues["attributes"] = { + "_a_common_": self.dummy, + "_b_common_": mock.sentinel.value2, + } + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + + # Result has entirely EMPTY attributes (whether strict or lenient). + # TODO: is this maybe a mistake of the existing implementation ? + expected = self.lvalues.copy() + expected["attributes"] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + +class Test_difference(MixinSplitattrsMatrixTests): + operation_name = "difference" + + @pytest.fixture(autouse=True) + def setup(self): + self.lvalues = dict( standard_name=sentinel.standard_name, long_name=sentinel.long_name, var_name=sentinel.var_name, units=sentinel.units, - attributes=sentinel.attributes, + attributes=dict(), # MUST be a dict cell_methods=sentinel.cell_methods, ) + # Make a copy with all-different objects in it. 
+ self.rvalues = deepcopy(self.lvalues) self.dummy = sentinel.dummy self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + assert self.cls.difference.__doc__ == BaseMetadata.difference.__doc__ def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) + assert qualname_difference in _LENIENT + assert _LENIENT[qualname_difference] + assert _LENIENT[self.cls.difference] def test_lenient_default(self): other = sentinel.other @@ -456,11 +835,8 @@ def test_lenient_default(self): ) as mocker: result = self.none.difference(other) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) + assert return_value == result + assert mocker.call_args_list == [mock.call(other, lenient=None)] def test_lenient(self): other = sentinel.other @@ -471,178 +847,149 @@ def test_lenient(self): ) as mocker: result = self.none.difference(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with 
mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = (sentinel.cell_methods, None) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = (None, sentinel.cell_methods) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_cell_methods(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = ( - left["cell_methods"], - right["cell_methods"], - ) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = lexpected["cell_methods"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() 
- ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_cell_methods(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = ( - left["cell_methods"], - right["cell_methods"], - ) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = lexpected["cell_methods"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() + assert return_value == result + assert mocker.call_args_list == [mock.call(other, lenient=lenient)] + + def test_op_same(self, op_leniency): + is_lenient = op_leniency == "lenient" + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + + with mock.patch( + 
"iris.common.metadata._LENIENT", return_value=is_lenient + ): + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None + + def test_op_different__none(self, fieldname, op_leniency): + # One side has field=value, and the other field=None, both strict + lenient. + if fieldname in ("attributes",): + # These cannot properly be set to 'None'. Tested elsewhere. + pytest.skip() + + is_lenient = op_leniency == "lenient" + + lmetadata = self.cls(**self.lvalues) + self.rvalues[fieldname] = None + rmetadata = self.cls(**self.rvalues) + + if fieldname in ("units", "cell_methods"): + # These ones are always "strict" + strict_result = True + elif fieldname in ("standard_name", "long_name", "var_name"): + strict_result = not is_lenient + else: + # Ensure we are handling all the different field cases + raise ValueError( + f"{self.__name__} unhandled fieldname : {fieldname}" ) - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() + if strict_result: + diffentry = tuple( + [getattr(mm, fieldname) for mm in (lmetadata, rmetadata)] ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_measure_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = ( - left["cell_methods"], - right["cell_methods"], + # NOTE: in these 
cases, the difference metadata will fail an == operation, + # because of the 'None' entries. + # But we can use metadata._asdict() and test that. + lexpected = self.none._asdict() + lexpected[fieldname] = diffentry + rexpected = lexpected.copy() + rexpected[fieldname] = diffentry[::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + if strict_result: + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected + else: + # Expect NO differences + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None + + def test_op_different__value(self, fieldname, op_leniency): + # One field has different value for lhs/rhs, both strict + lenient. + if fieldname == "attributes": + # Attribute behaviours are tested separately + pytest.skip() + + self.lvalues[fieldname] = mock.sentinel.value1 + self.rvalues[fieldname] = mock.sentinel.value2 + lmetadata = self.cls(**self.lvalues) + rmetadata = self.cls(**self.rvalues) + + # In all cases, this field should show a difference : leniency has no effect + ldiff_values = (mock.sentinel.value1, mock.sentinel.value2) + ldiff_metadata = self.none._asdict() + ldiff_metadata[fieldname] = ldiff_values + rdiff_metadata = self.none._asdict() + rdiff_metadata[fieldname] = ldiff_values[::-1] + + # Check both l+r and r+l + assert lmetadata.difference(rmetadata)._asdict() == ldiff_metadata + assert rmetadata.difference(lmetadata)._asdict() == rdiff_metadata + + def test_op_different__attribute_extra(self, op_leniency): + # One field has an extra attribute, both strict + lenient. 
+ is_lenient = op_leniency == "lenient" + self.lvalues["attributes"] = {"_a_common_": self.dummy} + lmetadata = self.cls(**self.lvalues) + rvalues = deepcopy(self.lvalues) + rvalues["attributes"]["_b_extra_"] = mock.sentinel.extra + rmetadata = self.cls(**rvalues) + + if not is_lenient: + # In this case, attributes returns a "difference dictionary" + diffentry = tuple([{}, {"_b_extra_": mock.sentinel.extra}]) + lexpected = self.none._asdict() + lexpected["attributes"] = diffentry + rexpected = lexpected.copy() + rexpected["attributes"] = diffentry[::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + if is_lenient: + # It recognises no difference + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None + else: + # As calculated above + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected + + def test_op_different__attribute_value(self, op_leniency): + # lhs and rhs have different values for an attribute, both strict + lenient. 
+ is_lenient = op_leniency == "lenient" + self.lvalues["attributes"] = { + "_a_common_": self.dummy, + "_b_extra_": mock.sentinel.value1, + } + lmetadata = self.cls(**self.lvalues) + self.rvalues["attributes"] = { + "_a_common_": self.dummy, + "_b_extra_": mock.sentinel.value2, + } + rmetadata = self.cls(**self.rvalues) + + # In this case, attributes returns a "difference dictionary" + diffentry = tuple( + [ + {"_b_extra_": mock.sentinel.value1}, + {"_b_extra_": mock.sentinel.value2}, + ] ) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = lexpected["cell_methods"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + lexpected = self.none._asdict() + lexpected["attributes"] = diffentry + rexpected = lexpected.copy() + rexpected["attributes"] = diffentry[::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=is_lenient + ): + # As calculated above -- same for both strict + lenient + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected class Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py index 7416bb9da5..d29a120f35 100644 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -20,7 +20,7 @@ class Test(tests.IrisTest): def setUp(self): - self.forbidden_keys = LimitedAttributeDict._forbidden_keys + self.forbidden_keys = LimitedAttributeDict.CF_ATTRS_FORBIDDEN self.emsg = "{!r} is not a permitted attribute" def test__invalid_keys(self): diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index b1eed4743e..5e513c2bd0 100644 --- 
a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -33,7 +33,7 @@ CellMethod, DimCoord, ) -from iris.cube import Cube +from iris.cube import Cube, CubeAttrsDict import iris.exceptions from iris.exceptions import ( AncillaryVariableNotFoundError, @@ -3436,5 +3436,31 @@ def test_fail_assign_duckcellmethod(self): self.cube.cell_methods = (test_object,) +class TestAttributesProperty: + def test_attrs_type(self): + # Cube attributes are always of a special dictionary type. + cube = Cube([0], attributes={"a": 1}) + assert type(cube.attributes) is CubeAttrsDict + assert cube.attributes == {"a": 1} + + def test_attrs_remove(self): + # Wiping attributes replaces the stored object + cube = Cube([0], attributes={"a": 1}) + attrs = cube.attributes + cube.attributes = None + assert cube.attributes is not attrs + assert type(cube.attributes) is CubeAttrsDict + assert cube.attributes == {} + + def test_attrs_clear(self): + # Clearing attributes leaves the same object + cube = Cube([0], attributes={"a": 1}) + attrs = cube.attributes + cube.attributes.clear() + assert cube.attributes is attrs + assert type(cube.attributes) is CubeAttrsDict + assert cube.attributes == {} + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py new file mode 100644 index 0000000000..615de7b8e6 --- /dev/null +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -0,0 +1,407 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Unit tests for the `iris.cube.CubeAttrsDict` class.""" + +import pickle + +import numpy as np +import pytest + +from iris.common.mixin import LimitedAttributeDict +from iris.cube import CubeAttrsDict +from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS + + +@pytest.fixture +def sample_attrs() -> CubeAttrsDict: + return CubeAttrsDict( + locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"} + ) + + +def check_content(attrs, locals=None, globals=None, matches=None): + """ + Check a CubeAttrsDict for expected properties. + + Its ".globals" and ".locals" must match 'locals' and 'globals' args + -- except that, if 'matches' is provided, it is a CubeAttrsDict, whose + locals/globals *replace* the 'locals'/'globals' arguments. + + Check that the result is a CubeAttrsDict and, for both local + global parts, + * parts match for *equality* (==) but are *non-identical* (is not) + * order of keys matches expected (N.B. which is *not* required for equality) + """ + assert isinstance(attrs, CubeAttrsDict) + attr_locals, attr_globals = attrs.locals, attrs.globals + assert type(attr_locals) is LimitedAttributeDict + assert type(attr_globals) is LimitedAttributeDict + if matches: + locals, globals = matches.locals, matches.globals + + def check(arg, content): + if not arg: + arg = {} + if not isinstance(arg, LimitedAttributeDict): + arg = LimitedAttributeDict(arg) + # N.B. if 'arg' is an actual given LimitedAttributeDict, it is not changed.. + # .. 
we proceed to ensure that the stored content is equal but NOT the same + assert content == arg + assert content is not arg + assert list(content.keys()) == list(arg.keys()) + + check(locals, attr_locals) + check(globals, attr_globals) + + +class Test___init__: + def test_empty(self): + attrs = CubeAttrsDict() + check_content(attrs, None, None) + + def test_from_combined_dict(self): + attrs = CubeAttrsDict({"q": 3, "history": "something"}) + check_content(attrs, locals={"q": 3}, globals={"history": "something"}) + + def test_from_separate_dicts(self): + locals = {"q": 3} + globals = {"history": "something"} + attrs = CubeAttrsDict(locals=locals, globals=globals) + check_content(attrs, locals=locals, globals=globals) + + def test_from_cubeattrsdict(self, sample_attrs): + result = CubeAttrsDict(sample_attrs) + check_content(result, matches=sample_attrs) + + def test_from_cubeattrsdict_like(self): + class MyDict: + pass + + mydict = MyDict() + locals, globals = {"a": 1}, {"b": 2} + mydict.locals = locals + mydict.globals = globals + attrs = CubeAttrsDict(mydict) + check_content(attrs, locals=locals, globals=globals) + + +class Test_OddMethods: + def test_pickle(self, sample_attrs): + bytes = pickle.dumps(sample_attrs) + result = pickle.loads(bytes) + check_content(result, matches=sample_attrs) + + def test_clear(self, sample_attrs): + sample_attrs.clear() + check_content(sample_attrs, {}, {}) + + def test_del(self, sample_attrs): + # 'z' is in both locals+globals. Delete removes both. 
+ assert "z" in sample_attrs.keys() + del sample_attrs["z"] + assert "z" not in sample_attrs.keys() + + def test_copy(self, sample_attrs): + copy = sample_attrs.copy() + assert copy is not sample_attrs + check_content(copy, matches=sample_attrs) + + @pytest.fixture(params=["regular_arg", "split_arg"]) + def update_testcase(self, request): + lhs = CubeAttrsDict(globals={"a": 1, "b": 2}, locals={"b": 3, "c": 4}) + if request.param == "split_arg": + # A set of "update settings", with global/local-specific keys. + rhs = CubeAttrsDict( + globals={"a": 1001, "x": 1007}, + # NOTE: use a global-default key here, to check that type is preserved + locals={"b": 1003, "history": 1099}, + ) + expected_result = CubeAttrsDict( + globals={"a": 1001, "b": 2, "x": 1007}, + locals={"b": 1003, "c": 4, "history": 1099}, + ) + else: + assert request.param == "regular_arg" + # A similar set of update values in a regular dict (so not local/global) + rhs = {"a": 1001, "x": 1007, "b": 1003, "history": 1099} + expected_result = CubeAttrsDict( + globals={"a": 1001, "b": 2, "history": 1099}, + locals={"b": 1003, "c": 4, "x": 1007}, + ) + return lhs, rhs, expected_result + + def test_update(self, update_testcase): + testval, updater, expected = update_testcase + testval.update(updater) + check_content(testval, matches=expected) + + def test___or__(self, update_testcase): + testval, updater, expected = update_testcase + original = testval.copy() + result = testval | updater + assert result is not testval + assert testval == original + check_content(result, matches=expected) + + def test___ior__(self, update_testcase): + testval, updater, expected = update_testcase + testval |= updater + check_content(testval, matches=expected) + + def test___ror__(self): + # Check the "or" operation, when lhs is a regular dictionary + lhs = {"a": 1, "b": 2, "history": 3} + rhs = CubeAttrsDict( + globals={"a": 1001, "x": 1007}, + # NOTE: use a global-default key here, to check that type is preserved + 
locals={"b": 1003, "history": 1099}, + ) + # The lhs should be promoted to a CubeAttrsDict, and then combined. + expected = CubeAttrsDict( + globals={"history": 3, "a": 1001, "x": 1007}, + locals={"a": 1, "b": 1003, "history": 1099}, + ) + result = lhs | rhs + check_content(result, matches=expected) + + @pytest.mark.parametrize("value", [1, None]) + @pytest.mark.parametrize("inputtype", ["regular_arg", "split_arg"]) + def test__fromkeys(self, value, inputtype): + if inputtype == "regular_arg": + # Check when input is a plain iterable of key-names + keys = ["a", "b", "history"] + # Result has keys assigned local/global via default mechanism. + expected = CubeAttrsDict( + globals={"history": value}, + locals={"a": value, "b": value}, + ) + else: + assert inputtype == "split_arg" + # Check when input is a CubeAttrsDict + keys = CubeAttrsDict( + globals={"a": 1}, locals={"b": 2, "history": 3} + ) + # The result preserves the input keys' local/global identity + # N.B. "history" would be global by default (cf. 
"regular_arg" case) + expected = CubeAttrsDict( + globals={"a": value}, + locals={"b": value, "history": value}, + ) + result = CubeAttrsDict.fromkeys(keys, value) + check_content(result, matches=expected) + + def test_to_dict(self, sample_attrs): + result = dict(sample_attrs) + expected = sample_attrs.globals.copy() + expected.update(sample_attrs.locals) + assert result == expected + + def test_array_copies(self): + array = np.array([3, 2, 1, 4]) + map = {"array": array} + attrs = CubeAttrsDict(map) + check_content(attrs, globals=None, locals=map) + attrs_array = attrs["array"] + assert np.all(attrs_array == array) + assert attrs_array is not array + + def test__str__(self, sample_attrs): + result = str(sample_attrs) + assert result == "{'b': 2, 'z': 'this', 'a': 1}" + + def test__repr__(self, sample_attrs): + result = repr(sample_attrs) + expected = ( + "CubeAttrsDict(" + "globals={'b': 2, 'z': 'that'}, " + "locals={'a': 1, 'z': 'this'})" + ) + assert result == expected + + +class TestEq: + def test_eq_empty(self): + attrs_1 = CubeAttrsDict() + attrs_2 = CubeAttrsDict() + assert attrs_1 == attrs_2 + + def test_eq_nonempty(self, sample_attrs): + attrs_1 = sample_attrs + attrs_2 = sample_attrs.copy() + assert attrs_1 == attrs_2 + + @pytest.mark.parametrize("aspect", ["locals", "globals"]) + def test_ne_missing(self, sample_attrs, aspect): + attrs_1 = sample_attrs + attrs_2 = sample_attrs.copy() + del getattr(attrs_2, aspect)["z"] + assert attrs_1 != attrs_2 + assert attrs_2 != attrs_1 + + @pytest.mark.parametrize("aspect", ["locals", "globals"]) + def test_ne_different(self, sample_attrs, aspect): + attrs_1 = sample_attrs + attrs_2 = sample_attrs.copy() + getattr(attrs_2, aspect)["z"] = 99 + assert attrs_1 != attrs_2 + assert attrs_2 != attrs_1 + + def test_ne_locals_vs_globals(self): + attrs_1 = CubeAttrsDict(locals={"a": 1}) + attrs_2 = CubeAttrsDict(globals={"a": 1}) + assert attrs_1 != attrs_2 + assert attrs_2 != attrs_1 + + def test_eq_dict(self): + # A 
CubeAttrsDict can be equal to a plain dictionary (which would create it) + vals_dict = {"a": 1, "b": 2, "history": "this"} + attrs = CubeAttrsDict(vals_dict) + assert attrs == vals_dict + assert vals_dict == attrs + + def test_ne_dict_local_global(self): + # Dictionary equivalence fails if the local/global assignments are wrong. + # sample dictionary + vals_dict = {"title": "b"} + # these attrs are *not* the same, because 'title' is global by default + attrs = CubeAttrsDict(locals={"title": "b"}) + assert attrs != vals_dict + assert vals_dict != attrs + + def test_empty_not_none(self): + # An empty CubeAttrsDict is not None, and does not compare to 'None' + # N.B. this for compatibility with the LimitedAttributeDict + attrs = CubeAttrsDict() + assert attrs is not None + with pytest.raises(TypeError, match="iterable"): + # Cannot *compare* to None (or anything non-iterable) + # N.B. not actually testing against None, as it upsets black (!) + attrs == 0 + + def test_empty_eq_iterables(self): + # An empty CubeAttrsDict is "equal" to various empty containers + attrs = CubeAttrsDict() + assert attrs == {} + assert attrs == [] + assert attrs == () + + +class TestDictOrderBehaviour: + def test_ordering(self): + attrs = CubeAttrsDict({"a": 1, "b": 2}) + assert list(attrs.keys()) == ["a", "b"] + # Remove, then reinstate 'a' : it will go to the back + del attrs["a"] + attrs["a"] = 1 + assert list(attrs.keys()) == ["b", "a"] + + def test_globals_locals_ordering(self): + # create attrs with a global attribute set *before* a local one .. + attrs = CubeAttrsDict() + attrs.globals.update(dict(a=1, m=3)) + attrs.locals.update(dict(f=7, z=4)) + # .. and check key order of combined attrs + assert list(attrs.keys()) == ["a", "m", "f", "z"] + + def test_locals_globals_nonalphabetic_order(self): + # create the "same" thing with locals before globals, *and* different key order + attrs = CubeAttrsDict() + attrs.locals.update(dict(z=4, f=7)) + attrs.globals.update(dict(m=3, a=1)) + # .. 
this shows that the result is not affected either by alphabetical key + # order, or the order of adding locals/globals + # I.E. result is globals-in-create-order, then locals-in-create-order + assert list(attrs.keys()) == ["m", "a", "z", "f"] + + +class TestSettingBehaviours: + def test_add_localtype(self): + attrs = CubeAttrsDict() + # Any attribute not recognised as global should go into 'locals' + attrs["z"] = 3 + check_content(attrs, locals={"z": 3}) + + @pytest.mark.parametrize("attrname", _CF_GLOBAL_ATTRS) + def test_add_globaltype(self, attrname): + # These specific attributes are recognised as belonging in 'globals' + attrs = CubeAttrsDict() + attrs[attrname] = "this" + check_content(attrs, globals={attrname: "this"}) + + def test_overwrite_local(self): + attrs = CubeAttrsDict({"a": 1}) + attrs["a"] = 2 + check_content(attrs, locals={"a": 2}) + + @pytest.mark.parametrize("attrname", _CF_GLOBAL_ATTRS) + def test_overwrite_global(self, attrname): + attrs = CubeAttrsDict({attrname: 1}) + attrs[attrname] = 2 + check_content(attrs, globals={attrname: 2}) + + @pytest.mark.parametrize("global_attrname", _CF_GLOBAL_ATTRS) + def test_overwrite_forced_local(self, global_attrname): + attrs = CubeAttrsDict(locals={global_attrname: 1}) + # The attr *remains* local, even though it would be created global by default + attrs[global_attrname] = 2 + check_content(attrs, locals={global_attrname: 2}) + + def test_overwrite_forced_global(self): + attrs = CubeAttrsDict(globals={"data": 1}) + # The attr remains global, even though it would be created local by default + attrs["data"] = 2 + check_content(attrs, globals={"data": 2}) + + def test_overwrite_both(self): + attrs = CubeAttrsDict(locals={"z": 1}, globals={"z": 1}) + # Where both exist, it will always update the local one + attrs["z"] = 2 + check_content(attrs, locals={"z": 2}, globals={"z": 1}) + + def test_local_global_masking(self, sample_attrs): + # initially, local 'z' masks the global one + assert sample_attrs["z"] 
== sample_attrs.locals["z"] + # remove local, global will show + del sample_attrs.locals["z"] + assert sample_attrs["z"] == sample_attrs.globals["z"] + # re-set local + sample_attrs.locals["z"] = "new" + assert sample_attrs["z"] == "new" + # change the global, makes no difference + sample_attrs.globals["z"] == "other" + assert sample_attrs["z"] == "new" + + @pytest.mark.parametrize("globals_or_locals", ("globals", "locals")) + @pytest.mark.parametrize( + "value_type", + ("replace", "emptylist", "emptytuple", "none", "zero", "false"), + ) + def test_replace_subdict(self, globals_or_locals, value_type): + # Writing to attrs.xx always replaces content with a *new* LimitedAttributeDict + locals, globals = {"a": 1}, {"b": 2} + attrs = CubeAttrsDict(locals=locals, globals=globals) + # Snapshot old + write new value, of either locals or globals + old_content = getattr(attrs, globals_or_locals) + value = { + "replace": {"qq": 77}, + "emptytuple": (), + "emptylist": [], + "none": None, + "zero": 0, + "false": False, + }[value_type] + setattr(attrs, globals_or_locals, value) + # check new content is expected type and value + new_content = getattr(attrs, globals_or_locals) + assert isinstance(new_content, LimitedAttributeDict) + assert new_content is not old_content + if value_type != "replace": + value = {} + assert new_content == value + # Check expected whole: i.e. 
either globals or locals was replaced with value + if globals_or_locals == "globals": + globals = value + else: + locals = value + check_content(attrs, locals=locals, globals=globals) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py index e2297be69e..973e10217b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py @@ -41,7 +41,7 @@ def _make_engine(global_attributes=None, standard_name=None, long_name=None): return engine -class TestInvalidGlobalAttributes(tests.IrisTest): +class TestGlobalAttributes(tests.IrisTest): def test_valid(self): global_attributes = { "Conventions": "CF-1.5", @@ -50,7 +50,7 @@ def test_valid(self): engine = _make_engine(global_attributes) build_cube_metadata(engine) expected = global_attributes - self.assertEqual(engine.cube.attributes, expected) + self.assertEqual(engine.cube.attributes.globals, expected) def test_invalid(self): global_attributes = { @@ -64,13 +64,14 @@ def test_invalid(self): # Check for a warning. self.assertEqual(warn.call_count, 1) self.assertIn( - "Skipping global attribute 'calendar'", warn.call_args[0][0] + "Skipping disallowed global attribute 'calendar'", + warn.call_args[0][0], ) # Check resulting attributes. The invalid entry 'calendar' # should be filtered out. 
global_attributes.pop("calendar") expected = global_attributes - self.assertEqual(engine.cube.attributes, expected) + self.assertEqual(engine.cube.attributes.globals, expected) class TestCubeName(tests.IrisTest): diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index a4198160a9..de5308a7fa 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -13,8 +13,13 @@ import numpy as np -from iris.cube import Cube +from iris.coords import AuxCoord +from iris.cube import Cube, CubeAttrsDict import iris.tests.stock +from iris.tests.unit.common.metadata.test_CubeMetadata import ( + _TEST_ATTRNAME, + make_attrsdict, +) from iris.util import equalise_attributes @@ -152,5 +157,111 @@ def test_complex_somecommon(self): ) +class TestSplitattributes: + """ + Extra testing for cases where attributes differ specifically by type + + That is, where there is a new possibility of 'mismatch' due to the newer "typing" + of attributes as global or local. 
+ + Specifically, it is now possible that although + "cube1.attributes.keys() == cube2.attributes.keys()", + AND "cube1.attributes[k] == cube2.attributes[k]" for all keys, + YET STILL (possibly) "cube1.attributes != cube2.attributes" + """ + + @staticmethod + def _sample_splitattrs_cube(attr_global_local): + attrs = CubeAttrsDict( + globals=make_attrsdict(attr_global_local[0]), + locals=make_attrsdict(attr_global_local[1]), + ) + return Cube([0], attributes=attrs) + + @staticmethod + def check_equalised_result(cube1, cube2): + equalise_attributes([cube1, cube2]) + # Note: "X" represents a missing attribute, as in test_CubeMetadata + return [ + ( + cube1.attributes.globals.get(_TEST_ATTRNAME, "X") + + cube1.attributes.locals.get(_TEST_ATTRNAME, "X") + ), + ( + cube2.attributes.globals.get(_TEST_ATTRNAME, "X") + + cube2.attributes.locals.get(_TEST_ATTRNAME, "X") + ), + ] + + def test__global_and_local__bothsame(self): + # A trivial case showing that the original globals+locals are both preserved. + cube1 = self._sample_splitattrs_cube("AB") + cube2 = self._sample_splitattrs_cube("AB") + result = self.check_equalised_result(cube1, cube2) + assert result == ["AB", "AB"] + + def test__globals_different(self): + cube1 = self._sample_splitattrs_cube("AX") + cube2 = self._sample_splitattrs_cube("BX") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + def test__locals_different(self): + cube1 = self._sample_splitattrs_cube("XA") + cube2 = self._sample_splitattrs_cube("XB") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + def test__oneglobal_onelocal__different(self): + cube1 = self._sample_splitattrs_cube("AX") + cube2 = self._sample_splitattrs_cube("XB") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + # This case fails without the split-attributes fix. 
+ def test__oneglobal_onelocal__same(self): + cube1 = self._sample_splitattrs_cube("AX") + cube2 = self._sample_splitattrs_cube("XA") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + def test__sameglobals_onelocal__different(self): + cube1 = self._sample_splitattrs_cube("AB") + cube2 = self._sample_splitattrs_cube("AX") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + # This case fails without the split-attributes fix. + def test__sameglobals_onelocal__same(self): + cube1 = self._sample_splitattrs_cube("AA") + cube2 = self._sample_splitattrs_cube("AX") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + # This case fails without the split-attributes fix. + def test__differentglobals_samelocals(self): + cube1 = self._sample_splitattrs_cube("AC") + cube2 = self._sample_splitattrs_cube("BC") + result = self.check_equalised_result(cube1, cube2) + assert result == ["XX", "XX"] + + +class TestNonCube: + # Just to assert that we can do operations on non-cube components (like Coords), + # in fact effectively, anything with a ".attributes". + # Even though the docstring does not admit this, we test it because we put in + # special code to preserve it when adding the split-attribute handling. + def test(self): + attrs = [1, 1, 2] + coords = [ + AuxCoord([0], attributes={"a": attr, "b": "all_the_same"}) + for attr in attrs + ] + equalise_attributes(coords) + assert all( + coord.attributes == {"b": "all_the_same"} for coord in coords + ) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/util.py b/lib/iris/util.py index 4509f2885b..10a58fdef0 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -2071,24 +2071,50 @@ def equalise_attributes(cubes): See more at :doc:`/userguide/real_and_lazy_data`. 
""" - removed = [] + # deferred import to avoid circularity problem + from iris.common._split_attribute_dicts import ( + _convert_splitattrs_to_pairedkeys_dict, + ) + + cube_attrs = [cube.attributes for cube in cubes] + + # Convert all the input dictionaries to ones with 'paired' keys, so each key + # becomes a pair, ('local'/'global', attribute-name), making them specific to each + # "type", i.e. global or local. + # This is needed to ensure that afterwards all cubes will have identical + # attributes, E.G. it treats an attribute which is global on one cube and local + # on another as *not* the same. This is essential to its use in making merges work. + # + # This approach does also still function with "ordinary" dictionaries, or + # :class:`iris.common.mixin.LimitedAttributeDict`, though somewhat inefficiently, + # so the routine works on *other* objects bearing attributes, i.e. not just Cubes. + # That is also important since the original code allows that (though the docstring + # does not admit it). + cube_attrs = [ + _convert_splitattrs_to_pairedkeys_dict(dic) for dic in cube_attrs + ] + # Work out which attributes are identical across all the cubes. - common_keys = list(cubes[0].attributes.keys()) + common_keys = list(cube_attrs[0].keys()) keys_to_remove = set(common_keys) - for cube in cubes[1:]: - cube_keys = list(cube.attributes.keys()) + for attrs in cube_attrs[1:]: + cube_keys = list(attrs.keys()) keys_to_remove.update(cube_keys) common_keys = [ key for key in common_keys - if ( - key in cube_keys - and np.all(cube.attributes[key] == cubes[0].attributes[key]) - ) + if (key in cube_keys and np.all(attrs[key] == cube_attrs[0][key])) ] keys_to_remove.difference_update(common_keys) - # Remove all the other attributes. + # Convert back from the resulting 'paired' keys set, extracting just the + # attribute-name parts, as a set of names to be discarded. 
+ # Note: we don't care any more what type (global/local) these were : we will + # simply remove *all* attributes with those names. + keys_to_remove = set(key_pair[1] for key_pair in keys_to_remove) + + # Remove all the non-matching attributes. + removed = [] for cube in cubes: deleted_attributes = { key: cube.attributes.pop(key) @@ -2096,6 +2122,7 @@ def equalise_attributes(cubes): if key in cube.attributes } removed.append(deleted_attributes) + return removed From 54582d928e5e20d3cf09e9133c10714e58ecbe65 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Wed, 22 Nov 2023 16:47:39 +0000 Subject: [PATCH 087/134] [CI Bot] environment lockfiles auto-update (#5547) * Updated environment lockfiles * Fix test to ignore extra warnings from later versions of distributed. (#5600) --------- Co-authored-by: Lockfile bot Co-authored-by: Patrick Peglar --- .../integration/netcdf/test_delayed_save.py | 32 ++++- requirements/locks/py310-linux-64.lock | 124 ++++++++--------- requirements/locks/py311-linux-64.lock | 122 ++++++++--------- requirements/locks/py39-linux-64.lock | 126 +++++++++--------- 4 files changed, 211 insertions(+), 193 deletions(-) diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index c8c218000c..177e9ce325 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -5,6 +5,7 @@ """ Integration tests for delayed saving. 
""" +import re import warnings from cf_units import Unit @@ -197,19 +198,36 @@ def test_scheduler_types( if not save_is_delayed: assert result is None - assert len(logged_warnings) == 2 issued_warnings = [log.message for log in logged_warnings] else: assert result is not None assert len(logged_warnings) == 0 - warnings.simplefilter("error") - issued_warnings = result.compute() + with warnings.catch_warnings(record=True) as logged_warnings: + # The compute *returns* warnings from the delayed operations. + issued_warnings = result.compute() + issued_warnings = [ + log.message for log in logged_warnings + ] + issued_warnings + + warning_messages = [warning.args[0] for warning in issued_warnings] + if scheduler_type == "DistributedScheduler": + # Ignore any "large data transfer" messages generated, + # specifically when testing with the Distributed scheduler. + # These may not always occur and don't reflect something we want to + # test for. + large_transfer_message_regex = re.compile( + "Sending large graph.* may cause some slowdown", re.DOTALL + ) + warning_messages = [ + message + for message in warning_messages + if not large_transfer_message_regex.search(message) + ] - assert len(issued_warnings) == 2 + # In all cases, should get 2 fill value warnings overall. + assert len(warning_messages) == 2 expected_msg = "contains unmasked data points equal to the fill-value" - assert all( - expected_msg in warning.args[0] for warning in issued_warnings - ) + assert all(expected_msg in message for message in warning_messages) def test_time_of_writing( self, save_is_delayed, output_path, scheduler_type diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 2655960622..18b8ee256c 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 94966cd7393527bff211c87589678b2ffe1697705267a20b2708a4cc27da5376 +# input_hash: df35455963a70471a00b88b3c8609117d9379aebcb6472b49d2a621e0d0895fa @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,18 +9,18 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda#26322ec5d7712c3ded99dd656142b8ce https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab 
-https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_0.conda#6642e4faa4804be3a0e7edfefbd16595 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -36,11 +36,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 @@ -49,9 +49,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -74,21 +74,21 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda#4b441a1ee22397d5a27dc1126b849edd +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 
https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda#1e8ef4090ca4f0d66404a7441e1dbf3c -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_6.conda#80bf3b277c120dd294b51d404b931a75 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 @@ -96,16 +96,16 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba 
https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda#b72f016c910ff9295b1377d3e17da3f2 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 -https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda#c37b95bcd6c6833dacfd5df0ae2f4303 +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda#f3a8c32aa764c3e7188b4b810fc9d6ce +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 @@ -120,22 +120,22 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f2 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda#1f95722c94f00b69af69a066c7433714 https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1ab_0.conda#fef8ef5f0a54546b9efee39468229917 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py310hc6cd4ac_0.conda#90bccd216944c486966c3846b339b42f +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.5-py310hc6cd4ac_0.conda#9156537f8d99eb8c45d0f811e8164527 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_1.conda#a50918d10114a0bf80fb46c7cc692058 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -143,11 +143,11 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 
https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -181,7 +181,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f 
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -189,79 +189,79 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda#22541af7a9eb59fc6afcadb7ecdf9219 +https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py310h2372a71_0.conda#c7d552c32b87beb736c9658441bf93a9 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.3-py310h2372a71_0.conda#b4bfb11c034c257e20159e9001cd8e28 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_1.conda#8d7242302bb3d03b9a690b6dda872603 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h01dd4db_2.conda#9ef290f84bf1f3932e9b42117d9364ff -https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 
+https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py310h01dd4db_0.conda#95d87a906d88b5824d7d36eeef091dba +https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_1.conda#c7936ec7db24bb913671a1bc5eb2b79d +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py310hc6cd4ac_0.conda#68d5bfccaba2d89a7812098dd3966d9b https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.6-pyhd8ed1ab_0.conda#d5f8944ff9ab24a292511c83dce33dea -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b 
https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310hb13e2d6_0.conda#ac3b67e928cc71548efad9b522d42fef -https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_2.conda#bfb5c8fe5b2cce3ca6140cbd61ecef3b +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_4.conda#124211262afed349430d9a3de6b51e8f https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_5.conda#ef5333594a958b25912002886b82b253 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.4.0-pyhd8ed1ab_0.conda#b8dc6f9db1b9670e564b68277a79ffeb https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310h1f7b6fc_2.conda#7925aaa4330045bc32d334b20f446902 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py310hd41b1e2_1.conda#6a38f65d330b74495ad6990280486049 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.3-pyhd8ed1ab_0.conda#a7155483171dbc27a7385d1c26e779de 
-https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.30-pyhd8ed1ab_0.conda#b7a2e3bb89bda8c69839485c20aabadf -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.31-pyhd8ed1ab_0.conda#fea10604a45e974b110ea15a88913ebc +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py310h2372a71_1.conda#dfcf64f67961eb9686676f96fdb4b4d1 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py310hcc13569_1.conda#a64a2b4907b96d4bf3c9dab59563ab50 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py310hcc13569_0.conda#30a39c1064e5efc578d83c2a5f7cd749 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py310hb13e2d6_1.conda#4260b359d8fbeab4f789a8b0f968079f https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310h7dcad9a_0.conda#0d7c35fe5cc1f436e368ddd500deb979 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_3.conda#ce30848c8731fe993893a872218dd37a -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.6-pyhd8ed1ab_0.conda#fb1fc875719e217ed799a7aae11d3be4 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_4.conda#0ca55ca20891d393846695354b32ebc5 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_2.conda#5c0d101ef8fc542778aa80795a759d08 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_103.conda#0850d2a119d51601b20c406a4909af4d +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py310h62c0568_0.conda#e650bd952e5618050ccb088bc0c6dfb4 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py310hba70d50_100.conda#e19392760c7e4da3b9cb0ee5bf61bc4b https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 
https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h1f7b6fc_1.conda#857b828a13cdddf568958f7575b25b22 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310hcc13569_1.conda#31ef447724fb19066a9d00a660dab1bd https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_5.conda#f4fe7a6e3d7c78c9de048ea9dda21690 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_2.conda#cda26b4d722d7319ce66df50332ff09b -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.1-py310hff52083_0.conda#acd62190c3822df888791592130aa286 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.3-pyhd8ed1ab_1.conda#fbe2993dd48f14724b90bf12e92cc164 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 0bbb6bfdcd..96509aae97 100644 --- a/requirements/locks/py311-linux-64.lock +++ 
b/requirements/locks/py311-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 40113e38fffa3a31ce64e60231c756c740914d9f0444edaeecd07e598851abc8 +# input_hash: f2209792c838739771cbeb38eb5659da1f847d44387a829c931482c65e2f8885 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,18 +9,18 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_0.conda#6642e4faa4804be3a0e7edfefbd16595 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -36,11 +36,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e 
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 @@ -49,9 +49,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f 
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -74,21 +74,21 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda#4b441a1ee22397d5a27dc1126b849edd +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe 
https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda#1e8ef4090ca4f0d66404a7441e1dbf3c -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_6.conda#80bf3b277c120dd294b51d404b931a75 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 @@ -96,16 +96,16 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 
https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda#b72f016c910ff9295b1377d3e17da3f2 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda#c37b95bcd6c6833dacfd5df0ae2f4303 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 @@ -120,22 +120,22 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f2 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_1.conda#cce9e7c3f1c307f2a5fb08a2922d6164 https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1ab_0.conda#fef8ef5f0a54546b9efee39468229917 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py311hb755f60_0.conda#c54d71e8031a10d08f2e87ff81821588 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.5-py311hb755f60_0.conda#25b42509a68f96e612534af3fe2cf033 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_1.conda#a50918d10114a0bf80fb46c7cc692058 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -143,11 +143,11 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 
https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -180,7 +180,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f 
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -188,79 +188,79 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda#22541af7a9eb59fc6afcadb7ecdf9219 +https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py311h459d7ec_0.conda#ac995b680de3bdce2531c553b27dfe7e -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.3-py311h459d7ec_0.conda#a811af88d3c522cf36f4674ef699021d +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_1.conda#8d7242302bb3d03b9a690b6dda872603 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py311ha6c5da5_2.conda#d6de249502f16ac151fcef9f743937b9 -https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 
+https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py311ha6c5da5_0.conda#83a988daf5c49e57f7d2086fb6781fe8 +https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_1.conda#e09eb6aad3607fb6f2c071a2c6a26e1d +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py311hb755f60_0.conda#02336abab4cb5dd794010ef53c54bd09 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.6-pyhd8ed1ab_0.conda#d5f8944ff9ab24a292511c83dce33dea -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b 
https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py311h64a7726_0.conda#bf16a9f625126e378302f08e7ed67517 -https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_2.conda#8298afb85a731b02dac82e02b6e13ae0 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_4.conda#75d504c6787edc377ebdba087a26a61b https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_5.conda#e4d262cc3600e70b505a6761d29f6207 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.4.0-pyhd8ed1ab_0.conda#b8dc6f9db1b9670e564b68277a79ffeb https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h1f0f07a_2.conda#571c0c47e8dbcf03577935ac818b6696 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py311h9547e67_1.conda#52d3de443952d33c5cee6b24b172ce96 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.3-pyhd8ed1ab_0.conda#a7155483171dbc27a7385d1c26e779de 
-https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.30-pyhd8ed1ab_0.conda#b7a2e3bb89bda8c69839485c20aabadf -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.31-pyhd8ed1ab_0.conda#fea10604a45e974b110ea15a88913ebc +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py311h459d7ec_1.conda#45b8d355bbcdd27588c2d266bcfdff84 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py311h320fe9a_1.conda#a4371a95a8ae703a22949af28467b93d +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py311h320fe9a_0.conda#3ea3486e16d559dfcb539070ed330a1e https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py311h64a7726_1.conda#e4b4d3b764e2d029477d0db88248a8b5 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311he06c224_0.conda#c90e2469d7512f3bba893533a82d7a02 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_3.conda#4ac4de995f18d232af077e7743568b97 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.6-pyhd8ed1ab_0.conda#fb1fc875719e217ed799a7aae11d3be4 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py311h54ef318_2.conda#5655371cc61b8c31c369a7e709acb294 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311he8ad708_103.conda#97b45ba4ff4e46a07dd6c60040256538 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py311h54ef318_0.conda#201fdabdb86bb8fb6e99fa3f0dab8122 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py311he8ad708_100.conda#597b1ad6cb7011b7561c20ea30295cae https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 
https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_1.conda#10d1806e20da040c58c36deddf51c70c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_5.conda#ec7e45bc76d9d0b69a74a2075932b8e8 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py311h38be061_2.conda#0289918d4a09bbd0b85fd23ddf1c3ac1 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.1-py311h38be061_0.conda#8a21cbbb87357c701fa44f4cfa4e23d7 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.3-pyhd8ed1ab_1.conda#fbe2993dd48f14724b90bf12e92cc164 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 167fc29e4c..4a7d83d4c7 100644 --- a/requirements/locks/py39-linux-64.lock +++ 
b/requirements/locks/py39-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: cc8b627bc99f75128e66e8d5f19fad191f76de7f27898db96e0eef7d6dc6e83a +# input_hash: 26c72df308ccfddf5aa1ad644bf5158095cf3032f3abe9322a6f1cdaab977a7c @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,18 +9,18 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_cp39.conda#bfe4b3259a8ac6cdf0037752904da6a7 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_0.conda#6642e4faa4804be3a0e7edfefbd16595 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -36,11 +36,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e 
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-hd590300_1.conda#854e3e1623b39777140f199c5f9ab952 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 @@ -49,9 +49,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda#7bb88ce04c8deb9f7d763ae04a1da72f 
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -74,21 +74,21 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda#4b441a1ee22397d5a27dc1126b849edd +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe 
https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda#f3858448893839820d4bcfb14ad3ecdf +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda#1e8ef4090ca4f0d66404a7441e1dbf3c -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_6.conda#80bf3b277c120dd294b51d404b931a75 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda#513336054f884f95d9fd925748f41ef3 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 @@ -96,16 +96,16 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 
https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda#b72f016c910ff9295b1377d3e17da3f2 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda#c37b95bcd6c6833dacfd5df0ae2f4303 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 @@ -120,22 +120,22 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f2 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_1.conda#c48418c8b35f1d59ae9ae1174812b40a https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.0-pyhd8ed1ab_0.conda#fef8ef5f0a54546b9efee39468229917 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.3-py39h3d6467e_0.conda#13febcb5470ba004eeb3e7883fa66e79 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.5-py39h3d6467e_0.conda#8a666e66408ec097bf7b6d44353d6294 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.4-pyhd8ed1ab_0.conda#5173d4b8267a0699a43d73231e0b6596 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.9.2-pyh1a96a4e_0.conda#9d15cd3a0e944594ab528da37dc72ecc +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_1.conda#a50918d10114a0bf80fb46c7cc692058 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -143,11 +143,11 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 
https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda#bcddbb497582ece559465b9cd11042e7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -181,7 +181,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda#1ccd092478b3e0ee10d7a891adbf8a4f 
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -189,79 +189,79 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda#22541af7a9eb59fc6afcadb7ecdf9219 +https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py39hd1e30aa_0.conda#74b032179f7782051800908cb2250132 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.3-py39hd1e30aa_0.conda#873fb1d81f9e9220d605c6b05a96544c 
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_1.conda#8d7242302bb3d03b9a690b6dda872603 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda#93dd9ab275ad888ed8113953769af78c +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda#a1244707531e5b143c420c70573c8ec5 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py39had0adad_2.conda#4d5990bb620ed36b10a528324d9b75e3 
-https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_1.conda#900fd11ac61d4415d515583fcb570207 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda#eeaa413fddccecb2ab7f747bdb55b07f +https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda#6dd662ff5ac9a783e5c940ce9f3fe649 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_1.conda#39d2473881976eeb57c09c106d2d9fc3 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py39h3d6467e_0.conda#e667a3ab0df62c54e60e1843d2e6defb https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.6-pyhd8ed1ab_0.conda#d5f8944ff9ab24a292511c83dce33dea -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a 
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h474f0d3_0.conda#62f1d2e05327bf62728afa448f2a9261 -https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_2.conda#cb5ecd8db6d8ca8b9f281658a8512433 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_4.conda#4b6e79000ec3a495f429b2c1092ed63b https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_5.conda#93aff412f3e49fdb43361c0215cbd72d -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.4.0-pyhd8ed1ab_0.conda#b8dc6f9db1b9670e564b68277a79ffeb https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be 
-https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h44dd56e_2.conda#bb788b462770a49433d7412e7881d917 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py39h7633fee_1.conda#33afb3357cd0d120ecb26778d37579e4 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.9.3-pyhd8ed1ab_0.conda#a7155483171dbc27a7385d1c26e779de -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.30-pyhd8ed1ab_0.conda#b7a2e3bb89bda8c69839485c20aabadf -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.31-pyhd8ed1ab_0.conda#fea10604a45e974b110ea15a88913ebc +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py39hd1e30aa_1.conda#ca63612907462c8e36edcc9bbacc253e https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.1-py39hddac248_1.conda#f32809db710b8aac48fbc14c13058530 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py39hddac248_0.conda#961b398d8c421a3752e26f01f2dcbdac https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 
https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py39h474f0d3_1.conda#55441724fedb3042d38ffa5220f00804 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h1bc45ef_0.conda#ca067895d22f8a0d38f225a95184858e https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.4-pyhd8ed1ab_0.conda#c3feaf947264a59a125e8c26e98c3c5a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_3.conda#cbc2fe7741df3546448a534827238c32 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.9.3-pyhd8ed1ab_0.conda#543fafdd7b325bf16199235ee5f20622 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.6-pyhd8ed1ab_0.conda#fb1fc875719e217ed799a7aae11d3be4 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_4.conda#81310d21bf9d91754c1220c585bb72d6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py39he9076e7_2.conda#404144d0628ebbbbd56d161c677cc71b -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4282601_103.conda#c61de71bd3099973376aa370e3a0b39e 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py39he9076e7_0.conda#89615b866cb3b0d8ad4e2a11e2bcf9a0 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py39h4282601_100.conda#d2809fbf0d8ae7b8ca92c456cb44a7d4 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h44dd56e_1.conda#90c5165691fdcb5a9f43907e32ea48b4 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39hddac248_1.conda#8dd2eb1e7aa9a33a92a75bdcea3f0dd0 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_5.conda#e1f148e57d071b09187719df86f513c1 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py39hf3d152e_2.conda#ffe5ae58957da676064e2ce5d039d259 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.1-pyhd8ed1ab_0.conda#78153addf629c51fab775ef360012ca3 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.1-py39hf3d152e_0.conda#f8b1cf66dbdbc9fe1a298a11fddcfb05 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.3-pyhd8ed1ab_1.conda#fbe2993dd48f14724b90bf12e92cc164 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 
https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 From 507c34c077bf3d145784308965c85c6ab980ba09 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Thu, 23 Nov 2023 15:12:55 +0000 Subject: [PATCH 088/134] Mergeback of `FEATURE_chunk_control` branch (#5588) * Merge chunk control code into latest iris (#5565) * Dask chunking control for netcdf loading. * renamed loader * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix indentation error, perhaps also docstring error * fixed result error in loader, and set tests to treat as big files * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * trial and error, solve non iterable tuple 1.0 * trial and error, solve non iterable tuple 2.0 (used if var is none: instead of if var: ) * commented out docstring * fixed mock 'no name' failure * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fixed precommit issues * corrected docstrings as per review comments * Removed unnecessary line Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --------- Co-authored-by: Patrick Peglar Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Chunk control modes (#5575) * added modes * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * added as_dask mode * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * cleaned up enum and as_dask, as per review comments * [pre-commit.ci] auto fixes from pre-commit.com hooks for more 
information, see https://pre-commit.ci * corrected to in final place * unindented lazy_param assignment one indent --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> * Corrected required type of dimension_chunksizes. (#5581) * Chunk Control Tests (#5583) * converted tests to pytest, added neg_one, and incomplete from_file and as_dask tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * added from_file test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * added mocking tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * trial and error with mocks and patches, may or may not work * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * converted Mock to patch in as_dask test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * review comment changes * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * pre commit fixes * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * review comments, and added test in test__get_cf_var_data() * added in another test * added tests and fixed review comments * added AuxCoord test --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> * Chunk control minor fixes (#5593) * Disallow chunks=None in optimum_chunksize. * Clearer docstrings. * Corrected docstring. * Chunk Control documentation (#5597) * init PR, skeleton TP * whoops, missed the TP. 
* fixed doctests in rst file * correct triple chevron to elipses * updated set doctest to better show functionality * removed in-progress doctest code * Review comments, part 1 * Review comments, part 2 * changed numpy docs dict * wait, this way is better * fixed linkcheck failures (maybe) * fixed :meth: * fixed a couple doc bits * hopefully fixed doctests * newest review comments * fixed rendering, and wording in docstring * fixed docstring numpyness * What's New Entry (#5601) * written whatsnew entry * added ref * moved label to before title --------- Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> Co-authored-by: Patrick Peglar Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/src/techpapers/index.rst | 1 + docs/src/techpapers/netcdf_io.rst | 140 ++++++++++ docs/src/whatsnew/latest.rst | 8 + lib/iris/_lazy_data.py | 203 ++++++++++----- lib/iris/fileformats/netcdf/loader.py | 243 +++++++++++++++++- .../netcdf/loader/test__chunk_control.py | 216 ++++++++++++++++ .../netcdf/loader/test__get_cf_var_data.py | 13 +- .../tests/unit/lazy_data/test_as_lazy_data.py | 24 +- 8 files changed, 768 insertions(+), 80 deletions(-) create mode 100644 docs/src/techpapers/netcdf_io.rst create mode 100644 lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py diff --git a/docs/src/techpapers/index.rst b/docs/src/techpapers/index.rst index 773c8f7059..e97a87f39c 100644 --- a/docs/src/techpapers/index.rst +++ b/docs/src/techpapers/index.rst @@ -11,3 +11,4 @@ Extra information on specific technical issues. um_files_loading.rst missing_data_handling.rst + netcdf_io.rst diff --git a/docs/src/techpapers/netcdf_io.rst b/docs/src/techpapers/netcdf_io.rst new file mode 100644 index 0000000000..e151b2b7c1 --- /dev/null +++ b/docs/src/techpapers/netcdf_io.rst @@ -0,0 +1,140 @@ +.. 
testsetup:: chunk_control + + import iris + from iris.fileformats.netcdf.loader import CHUNK_CONTROL + + from pathlib import Path + import dask + import shutil + import tempfile + + tmp_dir = Path(tempfile.mkdtemp()) + tmp_filepath = tmp_dir / "tmp.nc" + + cube = iris.load(iris.sample_data_path("E1_north_america.nc"))[0] + iris.save(cube, tmp_filepath, chunksizes=(120, 37, 49)) + old_dask = dask.config.get("array.chunk-size") + dask.config.set({'array.chunk-size': '500KiB'}) + + +.. testcleanup:: chunk_control + + dask.config.set({'array.chunk-size': old_dask}) + shutil.rmtree(tmp_dir) + +.. _netcdf_io: + +============================= +NetCDF I/O Handling in Iris +============================= + +This document provides a basic account of how Iris loads and saves NetCDF files. + +.. admonition:: Under Construction + + This document is still a work in progress, so might include blank or unfinished sections, + watch this space! + + +Chunk Control +-------------- + +Default Chunking +^^^^^^^^^^^^^^^^ + +Chunks are, by default, optimised by Iris on load. This will automatically +decide the best chunksize for your data without any user input. This is +calculated based on a number of factors, including: + +- File Variable Chunking +- Full Variable Shape +- Dask Default Chunksize +- Dimension Order: Earlier (outer) dimensions will be prioritised to be split over later (inner) dimensions. + +.. doctest:: chunk_control + + >>> cube = iris.load_cube(tmp_filepath) + >>> + >>> print(cube.shape) + (240, 37, 49) + >>> print(cube.core_data().chunksize) + (60, 37, 49) + +For more user control, functionality was updated in :pull:`5588`, with the +creation of the :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` class. + +Custom Chunking: Set +^^^^^^^^^^^^^^^^^^^^ + +There are three context manangers within :data:`~iris.fileformats.netcdf.loader.CHUNK_CONTROL`. The most basic is +:meth:`~iris.fileformats.netcdf.loader.ChunkControl.set`. 
This allows you to specify the chunksize for each dimension, +and to specify a ``var_name`` specifically to change. + +Using ``-1`` in place of a chunksize will ensure the chunksize stays the same +as the shape, i.e. no optimisation occurs on that dimension. + +.. doctest:: chunk_control + + >>> with CHUNK_CONTROL.set("air_temperature", time=180, latitude=-1, longitude=25): + ... cube = iris.load_cube(tmp_filepath) + >>> + >>> print(cube.core_data().chunksize) + (180, 37, 25) + +Note that ``var_name`` is optional, and that you don't need to specify every dimension. If you +specify only one dimension, the rest will be optimised using Iris' default behaviour. + +.. doctest:: chunk_control + + >>> with CHUNK_CONTROL.set(longitude=25): + ... cube = iris.load_cube(tmp_filepath) + >>> + >>> print(cube.core_data().chunksize) + (120, 37, 25) + +Custom Chunking: From File +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The second context manager is :meth:`~iris.fileformats.netcdf.loader.ChunkControl.from_file`. +This takes chunksizes as defined in the NetCDF file. Any dimensions without specified chunks +will default to Iris optimisation. + +.. doctest:: chunk_control + + >>> with CHUNK_CONTROL.from_file(): + ... cube = iris.load_cube(tmp_filepath) + >>> + >>> print(cube.core_data().chunksize) + (120, 37, 49) + +Custom Chunking: As Dask +^^^^^^^^^^^^^^^^^^^^^^^^ + +The final context manager, :meth:`~iris.fileformats.netcdf.loader.ChunkControl.as_dask`, bypasses +Iris' optimisation all together, and will take its chunksizes from Dask's behaviour. + +.. doctest:: chunk_control + + >>> with CHUNK_CONTROL.as_dask(): + ... 
cube = iris.load_cube(tmp_filepath) + >>> + >>> print(cube.core_data().chunksize) + (70, 37, 49) + + +Split Attributes +----------------- + +TBC + + +Deferred Saving +---------------- + +TBC + + +Guess Axis +----------- + +TBC diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 93919216c7..3f2f9a1fd9 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -59,6 +59,10 @@ This document explains the changes made to Iris for this release intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a coordinate. (:pull:`5551`) +#. `@pp-mo`_, `@trexfeathers`_ and `@ESadek-MO`_ added more control over + NetCDF chunking with the use of the :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` + context manager. (:pull:`5588`) + 🐛 Bugs Fixed ============= @@ -118,6 +122,10 @@ This document explains the changes made to Iris for this release #. `@ESadek-MO`_ added a phrasebook for synonymous terms used in similar packages. (:pull:`5564`) +#. `@ESadek-MO`_ and `@trexfeathers`_ created a technical paper for NetCDF + saving and loading, :ref:`netcdf_io` with a section on chunking, and placeholders + for further topics. (:pull:`5588`) + 💼 Internal =========== diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index fb29f411d3..11477a2fa6 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -61,6 +61,7 @@ def _optimum_chunksize_internals( shape, limit=None, dtype=np.dtype("f4"), + dims_fixed=None, dask_array_chunksize=dask.config.get("array.chunk-size"), ): """ @@ -70,8 +71,8 @@ def _optimum_chunksize_internals( Args: - * chunks (tuple of int, or None): - Pre-existing chunk shape of the target data : None if unknown. + * chunks (tuple of int): + Pre-existing chunk shape of the target data. * shape (tuple of int): The full array shape of the target data. * limit (int): @@ -79,6 +80,11 @@ def _optimum_chunksize_internals( :mod:`dask.config`. * dtype (np.dtype): Numpy dtype of target data. 
+ * dims_fixed (list of bool): + If set, a list of values equal in length to 'chunks' or 'shape'. + 'True' values indicate a dimension that can not be changed, i.e. that + element of the result must equal the corresponding value in 'chunks' or + data.shape. Returns: * chunk (tuple of int): @@ -99,6 +105,7 @@ def _optimum_chunksize_internals( "chunks = [c[0] for c in normalise_chunks('auto', ...)]". """ + # Set the chunksize limit. if limit is None: # Fetch the default 'optimal' chunksize from the dask config. @@ -108,58 +115,90 @@ def _optimum_chunksize_internals( point_size_limit = limit / dtype.itemsize - # Create result chunks, starting with a copy of the input. - result = list(chunks) - - if np.prod(result) < point_size_limit: - # If size is less than maximum, expand the chunks, multiplying later - # (i.e. inner) dims first. - i_expand = len(shape) - 1 - while np.prod(result) < point_size_limit and i_expand >= 0: - factor = np.floor(point_size_limit * 1.0 / np.prod(result)) - new_dim = result[i_expand] * int(factor) - if new_dim >= shape[i_expand]: - # Clip to dim size : chunk dims must not exceed the full shape. - new_dim = shape[i_expand] - else: - # 'new_dim' is less than the relevant dim of 'shape' -- but it - # is also the largest possible multiple of the input-chunks, - # within the size limit. - # So : 'i_expand' is the outer (last) dimension over which we - # will multiply the input chunks, and 'new_dim' is a value that - # ensures the fewest possible chunks within that dim. - - # Now replace 'new_dim' with the value **closest to equal-size - # chunks**, for the same (minimum) number of chunks. - # More-equal chunks are practically better. - # E.G. : "divide 8 into multiples of 2, with a limit of 7", - # produces new_dim=6, which would mean chunks of sizes (6, 2). - # But (4, 4) is clearly better for memory and time cost. - - # Calculate how many (expanded) chunks fit into this dimension. 
- dim_chunks = np.ceil(shape[i_expand] * 1.0 / new_dim) - # Get "ideal" (equal) size for that many chunks. - ideal_equal_chunk_size = shape[i_expand] / dim_chunks - # Use the nearest whole multiple of input chunks >= ideal. - new_dim = int( - result[i_expand] - * np.ceil(ideal_equal_chunk_size / result[i_expand]) - ) - - result[i_expand] = new_dim - i_expand -= 1 + if dims_fixed is not None: + if not np.any(dims_fixed): + dims_fixed = None + + if dims_fixed is None: + # Get initial result chunks, starting with a copy of the input. + working = list(chunks) + else: + # Adjust the operation to ignore the 'fixed' dims. + # (We reconstruct the original later, before return). + chunks = np.array(chunks) + dims_fixed_arr = np.array(dims_fixed) + # Reduce the target size by the fixed size of all the 'fixed' dims. + point_size_limit = point_size_limit // np.prod(chunks[dims_fixed_arr]) + # Work on only the 'free' dims. + original_shape = tuple(shape) + shape = tuple(np.array(shape)[~dims_fixed_arr]) + working = list(chunks[~dims_fixed_arr]) + + if len(working) >= 1: + if np.prod(working) < point_size_limit: + # If size is less than maximum, expand the chunks, multiplying + # later (i.e. inner) dims first. + i_expand = len(shape) - 1 + while np.prod(working) < point_size_limit and i_expand >= 0: + factor = np.floor(point_size_limit * 1.0 / np.prod(working)) + new_dim = working[i_expand] * int(factor) + if new_dim >= shape[i_expand]: + # Clip to dim size : must not exceed the full shape. + new_dim = shape[i_expand] + else: + # 'new_dim' is less than the relevant dim of 'shape' -- but + # it is also the largest possible multiple of the + # input-chunks, within the size limit. + # So : 'i_expand' is the outer (last) dimension over which + # we will multiply the input chunks, and 'new_dim' is a + # value giving the fewest possible chunks within that dim. + + # Now replace 'new_dim' with the value **closest to + # equal-size chunks**, for the same (minimum) number of + # chunks. 
More-equal chunks are practically better. + # E.G. : "divide 8 into multiples of 2, with a limit of 7", + # produces new_dim=6, meaning chunks of sizes (6, 2). + # But (4, 4) is clearly better for memory and time cost. + + # Calculate how many (expanded) chunks fit in this dim. + dim_chunks = np.ceil(shape[i_expand] * 1.0 / new_dim) + # Get "ideal" (equal) size for that many chunks. + ideal_equal_chunk_size = shape[i_expand] / dim_chunks + # Use the nearest whole multiple of input chunks >= ideal. + new_dim = int( + working[i_expand] + * np.ceil(ideal_equal_chunk_size / working[i_expand]) + ) + + working[i_expand] = new_dim + i_expand -= 1 + else: + # Similarly, reduce if too big, reducing earlier (outer) dims first. + i_reduce = 0 + while np.prod(working) > point_size_limit: + factor = np.ceil(np.prod(working) / point_size_limit) + new_dim = int(working[i_reduce] / factor) + if new_dim < 1: + new_dim = 1 + working[i_reduce] = new_dim + i_reduce += 1 + + working = tuple(working) + + if dims_fixed is None: + result = working else: - # Similarly, reduce if too big, reducing earlier (outer) dims first. 
- i_reduce = 0 - while np.prod(result) > point_size_limit: - factor = np.ceil(np.prod(result) / point_size_limit) - new_dim = int(result[i_reduce] / factor) - if new_dim < 1: - new_dim = 1 - result[i_reduce] = new_dim - i_reduce += 1 + # Reconstruct the original form + result = [] + for i_dim in range(len(original_shape)): + if dims_fixed[i_dim]: + dim = chunks[i_dim] + else: + dim = working[0] + working = working[1:] + result.append(dim) - return tuple(result) + return result @wraps(_optimum_chunksize_internals) @@ -168,6 +207,7 @@ def _optimum_chunksize( shape, limit=None, dtype=np.dtype("f4"), + dims_fixed=None, ): # By providing dask_array_chunksize as an argument, we make it so that the # output of _optimum_chunksize_internals depends only on its arguments (and @@ -177,11 +217,14 @@ def _optimum_chunksize( tuple(shape), limit=limit, dtype=dtype, + dims_fixed=dims_fixed, dask_array_chunksize=dask.config.get("array.chunk-size"), ) -def as_lazy_data(data, chunks=None, asarray=False): +def as_lazy_data( + data, chunks=None, asarray=False, dims_fixed=None, dask_chunking=False +): """ Convert the input array `data` to a :class:`dask.array.Array`. @@ -200,6 +243,16 @@ def as_lazy_data(data, chunks=None, asarray=False): If True, then chunks will be converted to instances of `ndarray`. Set to False (default) to pass passed chunks through unchanged. + * dims_fixed (list of bool): + If set, a list of values equal in length to 'chunks' or data.ndim. + 'True' values indicate a dimension which can not be changed, i.e. the + result for that index must equal the value in 'chunks' or data.shape. + + * dask_chunking (bool): + If True, Iris chunking optimisation will be bypassed, and dask's default + chunking will be used instead. Including a value for chunks while dask_chunking + is set to True will result in a failure. + Returns: The input array converted to a :class:`dask.array.Array`. 
@@ -211,24 +264,38 @@ def as_lazy_data(data, chunks=None, asarray=False): but reduced by a factor if that exceeds the dask default chunksize. """ - if chunks is None: - # No existing chunks : Make a chunk the shape of the entire input array - # (but we will subdivide it if too big). - chunks = list(data.shape) - - # Adjust chunk size for better dask performance, - # NOTE: but only if no shape dimension is zero, so that we can handle the - # PPDataProxy of "raw" landsea-masked fields, which have a shape of (0, 0). - if all(elem > 0 for elem in data.shape): - # Expand or reduce the basic chunk shape to an optimum size. - chunks = _optimum_chunksize(chunks, shape=data.shape, dtype=data.dtype) - + if dask_chunking: + if chunks is not None: + raise ValueError( + f"Dask chunking chosen, but chunks already assigned value {chunks}" + ) + lazy_params = {"asarray": asarray, "meta": np.ndarray} + else: + if chunks is None: + # No existing chunks : Make a chunk the shape of the entire input array + # (but we will subdivide it if too big). + chunks = list(data.shape) + + # Adjust chunk size for better dask performance, + # NOTE: but only if no shape dimension is zero, so that we can handle the + # PPDataProxy of "raw" landsea-masked fields, which have a shape of (0, 0). + if all(elem > 0 for elem in data.shape): + # Expand or reduce the basic chunk shape to an optimum size. 
+ chunks = _optimum_chunksize( + chunks, + shape=data.shape, + dtype=data.dtype, + dims_fixed=dims_fixed, + ) + lazy_params = { + "chunks": chunks, + "asarray": asarray, + "meta": np.ndarray, + } if isinstance(data, ma.core.MaskedConstant): data = ma.masked_array(data.data, mask=data.mask) if not is_lazy_data(data): - data = da.from_array( - data, chunks=chunks, asarray=asarray, meta=np.ndarray - ) + data = da.from_array(data, **lazy_params) return data diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index f0ed111687..623d1eb6c7 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -12,7 +12,12 @@ Also : `CF Conventions `_. """ -from collections.abc import Iterable +from collections.abc import Iterable, Mapping +from contextlib import contextmanager +from copy import deepcopy +from enum import Enum, auto +import threading +from typing import Union import warnings import numpy as np @@ -204,6 +209,7 @@ def _get_cf_var_data(cf_var, filename): unnecessarily slow + wasteful of memory. """ + global CHUNK_CONTROL if hasattr(cf_var, "_data_array"): # The variable is not an actual netCDF4 file variable, but an emulating # object with an attached data array (either numpy or dask), which can be @@ -220,6 +226,8 @@ def _get_cf_var_data(cf_var, filename): else: # Get lazy chunked data out of a cf variable. + # Creates Dask wrappers around data arrays for any cube components which + # can have lazy values, e.g. Cube, Coord, CellMeasure, AuxiliaryVariable. dtype = _get_actual_dtype(cf_var) # Make a data-proxy that mimics array access and can fetch from the file. @@ -233,21 +241,59 @@ def _get_cf_var_data(cf_var, filename): ) # Get the chunking specified for the variable : this is either a shape, or # maybe the string "contiguous". - chunks = cf_var.cf_data.chunking() - # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. 
- if chunks == "contiguous": - chunks = None - - # Return a dask array providing deferred access. - result = as_lazy_data(proxy, chunks=chunks) - + if CHUNK_CONTROL.mode is ChunkControl.Modes.AS_DASK: + result = as_lazy_data(proxy, chunks=None, dask_chunking=True) + else: + chunks = cf_var.cf_data.chunking() + # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. + if chunks == "contiguous": + if ( + CHUNK_CONTROL.mode is ChunkControl.Modes.FROM_FILE + and isinstance( + cf_var, iris.fileformats.cf.CFDataVariable + ) + ): + raise KeyError( + f"{cf_var.cf_name} does not contain pre-existing chunk specifications." + f" Instead, you might wish to use CHUNK_CONTROL.set(), or just use default" + f" behaviour outside of a context manager. " + ) + # Equivalent to chunks=None, but value required by chunking control + chunks = list(cf_var.shape) + + # Modify the chunking in the context of an active chunking control. + # N.B. settings specific to this named var override global ('*') ones. + dim_chunks = CHUNK_CONTROL.var_dim_chunksizes.get( + cf_var.cf_name + ) or CHUNK_CONTROL.var_dim_chunksizes.get("*") + dims = cf_var.cf_data.dimensions + if CHUNK_CONTROL.mode is ChunkControl.Modes.FROM_FILE: + dims_fixed = np.ones(len(dims), dtype=bool) + elif not dim_chunks: + dims_fixed = None + else: + # Modify the chunks argument, and pass in a list of 'fixed' dims, for + # any of our dims which are controlled. + dims_fixed = np.zeros(len(dims), dtype=bool) + for i_dim, dim_name in enumerate(dims): + dim_chunksize = dim_chunks.get(dim_name) + if dim_chunksize: + if dim_chunksize == -1: + chunks[i_dim] = cf_var.shape[i_dim] + else: + chunks[i_dim] = dim_chunksize + dims_fixed[i_dim] = True + if dims_fixed is None: + dims_fixed = [dims_fixed] + result = as_lazy_data( + proxy, chunks=chunks, dims_fixed=tuple(dims_fixed) + ) return result class _OrderedAddableList(list): """ A custom container object for actions recording. 
- Used purely in actions debugging, to accumulate a record of which actions were activated. @@ -270,6 +316,18 @@ def add(self, msg): def _load_cube(engine, cf, cf_var, filename): + global CHUNK_CONTROL + + # Translate dimension chunk-settings specific to this cube (i.e. named by + # it's data-var) into global ones, for the duration of this load. + # Thus, by default, we will create any AuxCoords, CellMeasures et al with + # any per-dimension chunksizes specified for the cube. + these_settings = CHUNK_CONTROL.var_dim_chunksizes.get(cf_var.cf_name, {}) + with CHUNK_CONTROL.set(**these_settings): + return _load_cube_inner(engine, cf, cf_var, filename) + + +def _load_cube_inner(engine, cf, cf_var, filename): from iris.cube import Cube """Create the cube associated with the CF-netCDF data variable.""" @@ -614,3 +672,168 @@ def load_cubes(file_sources, callback=None, constraints=None): continue yield cube + + +class ChunkControl(threading.local): + class Modes(Enum): + DEFAULT = auto() + FROM_FILE = auto() + AS_DASK = auto() + + def __init__(self, var_dim_chunksizes=None): + """ + Provide user control of Dask chunking. + + The NetCDF loader is controlled by the single instance of this: the + :data:`~iris.fileformats.netcdf.loader.CHUNK_CONTROL` object. + + A chunk size can be set for a specific (named) file dimension, when + loading specific (named) variables, or for all variables. + + When a selected variable is a CF data-variable, which loads as a + :class:`~iris.cube.Cube`, then the given dimension chunk size is *also* + fixed for all variables which are components of that :class:`~iris.cube.Cube`, + i.e. any :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, + :class:`~iris.coords.AncillaryVariable` etc. + This can be overridden, if required, by variable-specific settings. 
+ + For this purpose, :class:`~iris.experimental.ugrid.mesh.MeshCoord` and + :class:`~iris.experimental.ugrid.mesh.Connectivity` are not + :class:`~iris.cube.Cube` components, and chunk control on a + :class:`~iris.cube.Cube` data-variable will not affect them. + + """ + self.var_dim_chunksizes = var_dim_chunksizes or {} + self.mode = self.Modes.DEFAULT + + @contextmanager + def set( + self, + var_names: Union[str, Iterable[str]] = None, + **dimension_chunksizes: Mapping[str, int], + ) -> None: + """ + Control the Dask chunk sizes applied to NetCDF variables during loading. + + Parameters + ---------- + var_names : str or list of str, default=None + apply the `dimension_chunksizes` controls only to these variables, + or when building :class:`~iris.cube.Cube`\\ s from these data variables. + If ``None``, settings apply to all loaded variables. + dimension_chunksizes : dict of {str: int} + Kwargs specifying chunksizes for dimensions of file variables. + Each key-value pair defines a chunk size for a named file + dimension, e.g. ``{'time': 10, 'model_levels':1}``. + Values of ``-1`` will lock the chunk size to the full size of that + dimension. + + Notes + ----- + This function acts as a context manager, for use in a ``with`` block. + + >>> import iris + >>> from iris.fileformats.netcdf.loader import CHUNK_CONTROL + >>> with CHUNK_CONTROL.set("air_temperature", time=180, latitude=-1): + ... cube = iris.load(iris.sample_data_path("E1_north_america.nc"))[0] + + When `var_names` is present, the chunk size adjustments are applied + only to the selected variables. However, for a CF data variable, this + extends to all components of the (raw) :class:`~iris.cube.Cube` created + from it. + + **Un**-adjusted dimensions have chunk sizes set in the 'usual' way. 
+ That is, according to the normal behaviour of + :func:`iris._lazy_data.as_lazy_data`, which is: chunk size is based on + the file variable chunking, or full variable shape; this is scaled up + or down by integer factors to best match the Dask default chunk size, + i.e. the setting configured by + ``dask.config.set({'array.chunk-size': '250MiB'})``. + + """ + old_mode = self.mode + old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) + if var_names is None: + var_names = ["*"] + elif isinstance(var_names, str): + var_names = [var_names] + try: + for var_name in var_names: + # Note: here we simply treat '*' as another name. + # A specific name match should override a '*' setting, but + # that is implemented elsewhere. + if not isinstance(var_name, str): + msg = ( + "'var_names' should be an iterable of strings, " + f"not {var_names!r}." + ) + raise ValueError(msg) + dim_chunks = self.var_dim_chunksizes.setdefault(var_name, {}) + for dim_name, chunksize in dimension_chunksizes.items(): + if not ( + isinstance(dim_name, str) + and isinstance(chunksize, int) + ): + msg = ( + "'dimension_chunksizes' kwargs should be a dict " + f"of `str: int` pairs, not {dimension_chunksizes!r}." + ) + raise ValueError(msg) + dim_chunks[dim_name] = chunksize + yield + finally: + self.var_dim_chunksizes = old_var_dim_chunksizes + self.mode = old_mode + + @contextmanager + def from_file(self) -> None: + """ + Ensures the chunk sizes are loaded in from NetCDF file variables. + + Raises + ------ + KeyError + If any NetCDF data variables - those that become + :class:`~iris.cube.Cube`\\ s - do not specify chunk sizes. + + Notes + ----- + This function acts as a context manager, for use in a ``with`` block. 
+ """ + old_mode = self.mode + old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) + try: + self.mode = self.Modes.FROM_FILE + yield + finally: + self.mode = old_mode + self.var_dim_chunksizes = old_var_dim_chunksizes + + @contextmanager + def as_dask(self) -> None: + """ + Relies on Dask :external+dask:doc:`array` to control chunk sizes. + + Notes + ----- + This function acts as a context manager, for use in a ``with`` block. + """ + old_mode = self.mode + old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) + try: + self.mode = self.Modes.AS_DASK + yield + finally: + self.mode = old_mode + self.var_dim_chunksizes = old_var_dim_chunksizes + + +# Note: the CHUNK_CONTROL object controls chunk sizing in the +# :meth:`_get_cf_var_data` method. +# N.B. :meth:`_load_cube` also modifies this when loading each cube, +# introducing an additional context in which any cube-specific settings are +# 'promoted' into being global ones. + +#: The global :class:`ChunkControl` object providing user-control of Dask chunking +#: when Iris loads NetCDF files. +CHUNK_CONTROL: ChunkControl = ChunkControl() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py new file mode 100644 index 0000000000..7249c39829 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py @@ -0,0 +1,216 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Unit tests for :class:`iris.fileformats.netcdf.loader.ChunkControl`.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests # isort:skip +from unittest.mock import ANY, patch + +import dask +import numpy as np +import pytest + +import iris +from iris.cube import CubeList +from iris.fileformats.netcdf import loader +from iris.fileformats.netcdf.loader import CHUNK_CONTROL +import iris.tests.stock as istk + + +@pytest.fixture() +def save_cubelist_with_sigma(tmp_filepath): + cube = istk.simple_4d_with_hybrid_height() + cube_varname = "my_var" + sigma_varname = "my_sigma" + cube.var_name = cube_varname + cube.coord("sigma").var_name = sigma_varname + cube.coord("sigma").guess_bounds() + iris.save(cube, tmp_filepath) + return cube_varname, sigma_varname + + +@pytest.fixture +def save_cube_with_chunksize(tmp_filepath): + cube = istk.simple_3d() + # adding an aux coord allows us to test that + # iris.fileformats.netcdf.loader._get_cf_var_data() + # will only throw an error if from_file mode is + # True when the entire cube has no specified chunking + aux = iris.coords.AuxCoord( + points=np.zeros((3, 4)), + long_name="random", + units="1", + ) + cube.add_aux_coord(aux, [1, 2]) + iris.save(cube, tmp_filepath, chunksizes=(1, 3, 4)) + + +@pytest.fixture(scope="session") +def tmp_filepath(tmp_path_factory): + tmp_dir = tmp_path_factory.mktemp("data") + tmp_path = tmp_dir / "tmp.nc" + return str(tmp_path) + + +@pytest.fixture(autouse=True) +def remove_min_bytes(): + old_min_bytes = loader._LAZYVAR_MIN_BYTES + loader._LAZYVAR_MIN_BYTES = 0 + yield + loader._LAZYVAR_MIN_BYTES = old_min_bytes + + +def test_default(tmp_filepath, save_cubelist_with_sigma): + cube_varname, _ = save_cubelist_with_sigma + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + assert cube.lazy_data().chunksize == (3, 4, 5, 6) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (4,) + assert sigma.lazy_bounds().chunksize == (4, 2) + + +def test_control_global(tmp_filepath, 
save_cubelist_with_sigma): + cube_varname, _ = save_cubelist_with_sigma + with CHUNK_CONTROL.set(model_level_number=2): + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + assert cube.lazy_data().chunksize == (3, 2, 5, 6) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (2,) + assert sigma.lazy_bounds().chunksize == (2, 2) + + +def test_control_sigma_only(tmp_filepath, save_cubelist_with_sigma): + cube_varname, sigma_varname = save_cubelist_with_sigma + with CHUNK_CONTROL.set(sigma_varname, model_level_number=2): + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + assert cube.lazy_data().chunksize == (3, 4, 5, 6) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (2,) + # N.B. this does not apply to bounds array + assert sigma.lazy_bounds().chunksize == (4, 2) + + +def test_control_cube_var(tmp_filepath, save_cubelist_with_sigma): + cube_varname, _ = save_cubelist_with_sigma + with CHUNK_CONTROL.set(cube_varname, model_level_number=2): + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + assert cube.lazy_data().chunksize == (3, 2, 5, 6) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (2,) + assert sigma.lazy_bounds().chunksize == (2, 2) + + +def test_invalid_chunksize(tmp_filepath, save_cubelist_with_sigma): + with pytest.raises(ValueError): + with CHUNK_CONTROL.set(model_level_numer="2"): + CubeList(loader.load_cubes(tmp_filepath)) + + +def test_invalid_var_name(tmp_filepath, save_cubelist_with_sigma): + with pytest.raises(ValueError): + with CHUNK_CONTROL.set([1, 2], model_level_numer="2"): + CubeList(loader.load_cubes(tmp_filepath)) + + +def 
test_control_multiple(tmp_filepath, save_cubelist_with_sigma): + cube_varname, sigma_varname = save_cubelist_with_sigma + with CHUNK_CONTROL.set( + cube_varname, model_level_number=2 + ), CHUNK_CONTROL.set(sigma_varname, model_level_number=3): + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + assert cube.lazy_data().chunksize == (3, 2, 5, 6) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (3,) + assert sigma.lazy_bounds().chunksize == (2, 2) + + +def test_neg_one(tmp_filepath, save_cubelist_with_sigma): + cube_varname, _ = save_cubelist_with_sigma + with dask.config.set({"array.chunk-size": "50B"}): + with CHUNK_CONTROL.set(model_level_number=-1): + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + # uses known good output + assert cube.lazy_data().chunksize == (1, 4, 1, 1) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (4,) + assert sigma.lazy_bounds().chunksize == (4, 1) + + +def test_from_file(tmp_filepath, save_cube_with_chunksize): + with CHUNK_CONTROL.from_file(): + cube = next(loader.load_cubes(tmp_filepath)) + assert cube.shape == (2, 3, 4) + assert cube.lazy_data().chunksize == (1, 3, 4) + + +def test_no_chunks_from_file(tmp_filepath, save_cubelist_with_sigma): + cube_varname, _ = save_cubelist_with_sigma + with pytest.raises(KeyError): + with CHUNK_CONTROL.from_file(): + CubeList(loader.load_cubes(tmp_filepath)) + + +def test_as_dask(tmp_filepath, save_cubelist_with_sigma): + """ + This does not test return values, as we can't be sure + dask chunking behaviour won't change, or that it will differ + from our own chunking behaviour. 
+ """ + message = "Mock called, rest of test unneeded" + with patch("iris.fileformats.netcdf.loader.as_lazy_data") as as_lazy_data: + as_lazy_data.side_effect = RuntimeError(message) + with CHUNK_CONTROL.as_dask(): + try: + CubeList(loader.load_cubes(tmp_filepath)) + except RuntimeError as e: + if str(e) != message: + raise e + as_lazy_data.assert_called_with(ANY, chunks=None, dask_chunking=True) + + +def test_pinned_optimisation(tmp_filepath, save_cubelist_with_sigma): + cube_varname, _ = save_cubelist_with_sigma + with dask.config.set({"array.chunk-size": "250B"}): + with CHUNK_CONTROL.set(model_level_number=2): + cubes = CubeList(loader.load_cubes(tmp_filepath)) + cube = cubes.extract_cube(cube_varname) + assert cube.shape == (3, 4, 5, 6) + # uses known good output + # known good output WITHOUT pinning: (1, 1, 5, 6) + assert cube.lazy_data().chunksize == (1, 2, 2, 6) + + sigma = cube.coord("sigma") + assert sigma.shape == (4,) + assert sigma.lazy_points().chunksize == (2,) + assert sigma.lazy_bounds().chunksize == (2, 2) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index 3c3cbff7f4..caece8b6bc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -14,7 +14,7 @@ from iris._lazy_data import _optimum_chunksize import iris.fileformats.cf -from iris.fileformats.netcdf.loader import _get_cf_var_data +from iris.fileformats.netcdf.loader import CHUNK_CONTROL, _get_cf_var_data class Test__get_cf_var_data(tests.IrisTest): @@ -29,6 +29,7 @@ def _make( cf_data = mock.MagicMock( _FillValue=None, __getitem__="", + dimensions=["dim_" + str(x) for x in range(len(shape or "1"))], ) cf_data.chunking = mock.MagicMock(return_value=chunksizes) if shape is None: @@ -60,6 +61,16 @@ def test_cf_data_chunks(self): 
expected_chunks = _optimum_chunksize(chunks, self.shape) self.assertArrayEqual(lazy_data_chunks, expected_chunks) + def test_cf_data_chunk_control(self): + # more thorough testing can be found at `test__chunk_control` + chunks = [2500, 240, 200] + cf_var = self._make(shape=(2500, 240, 200), chunksizes=chunks) + with CHUNK_CONTROL.set(dim_0=25, dim_1=24, dim_2=20): + lazy_data = _get_cf_var_data(cf_var, self.filename) + lazy_data_chunks = [c[0] for c in lazy_data.chunks] + expected_chunks = (25, 24, 20) + self.assertArrayEqual(lazy_data_chunks, expected_chunks) + def test_cf_data_no_chunks(self): # No chunks means chunks are calculated from the array's shape by # `iris._lazy_data._optimum_chunksize()`. diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 0acb085830..2222d185c3 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -41,6 +41,25 @@ def test_non_default_chunks(self): (result,) = np.unique(lazy_data.chunks) self.assertEqual(result, 24) + def test_dask_chunking(self): + data = np.arange(24) + chunks = (12,) + optimum = self.patch("iris._lazy_data._optimum_chunksize") + optimum.return_value = chunks + _ = as_lazy_data(data, chunks=None, dask_chunking=True) + self.assertFalse(optimum.called) + + def test_dask_chunking_error(self): + data = np.arange(24) + chunks = (12,) + optimum = self.patch("iris._lazy_data._optimum_chunksize") + optimum.return_value = chunks + with self.assertRaisesRegex( + ValueError, + r"Dask chunking chosen, but chunks already assigned value", + ): + as_lazy_data(data, chunks=chunks, dask_chunking=True) + def test_with_masked_constant(self): masked_data = ma.masked_array([8], mask=True) masked_constant = masked_data[0] @@ -151,7 +170,10 @@ def test_default_chunks_limiting(self): limitcall_patch.call_args_list, [ mock.call( - list(test_shape), shape=test_shape, dtype=np.dtype("f4") + 
list(test_shape), + shape=test_shape, + dtype=np.dtype("f4"), + dims_fixed=None, ) ], ) From b78de2ec173675c951943937d000ab4b72ca8b51 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Tue, 28 Nov 2023 13:03:14 +0000 Subject: [PATCH 089/134] CI: specify matplotlib-base (#5606) * CI: specify matplotlib-base * whatsnew --- docs/src/whatsnew/latest.rst | 3 + requirements/locks/py310-linux-64.lock | 108 ++++++++---------------- requirements/locks/py311-linux-64.lock | 108 ++++++++---------------- requirements/locks/py39-linux-64.lock | 109 ++++++++----------------- requirements/py310.yml | 2 +- requirements/py311.yml | 2 +- requirements/py39.yml | 2 +- 7 files changed, 102 insertions(+), 232 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 3f2f9a1fd9..de69eddd4e 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -155,6 +155,9 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ corrected various comment spelling mistakes detected by `codespell`_. (:pull:`5546`) +#. `@rcomer`_ reduced the size of the conda environment used for testing. + (:pull:`5606`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 18b8ee256c..631227ca51 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,13 +1,13 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: df35455963a70471a00b88b3c8609117d9379aebcb6472b49d2a621e0d0895fa +# input_hash: 261e2a16d1b94dedb72e8d7119ea263c3e0f5a5c4eb2730980eda055cd4683ec @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda#26322ec5d7712c3ded99dd656142b8ce @@ -17,18 +17,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda# https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f 
-https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 -https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 @@ -41,17 +38,12 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -61,7 +53,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 @@ -70,22 +61,16 @@ https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d 
-https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_6.conda#80bf3b277c120dd294b51d404b931a75 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -96,21 +81,12 @@ 
https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f -https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda#f3a8c32aa764c3e7188b4b810fc9d6ce -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 
-https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 @@ -118,49 +94,42 @@ https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda#1f95722c94f00b69af69a066c7433714 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.5-py310hc6cd4ac_0.conda#9156537f8d99eb8c45d0f811e8164527 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py310hc6cd4ac_0.conda#7f987c519edb4df04d21a282678368cf https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_1.conda#a50918d10114a0bf80fb46c7cc692058 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed 
+https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py310hd41b1e2_0.conda#03255e1437f31f25ad95bb45c8b398bb 
+https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py310hd41b1e2_0.conda#dc5263dcaa1347e5a456ead3537be27d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda#cb25177acf28cc35cfa6c1ac1c679e22 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 @@ -181,9 +150,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 
-https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 @@ -195,76 +162,65 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f9 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.3-py310h2372a71_0.conda#b4bfb11c034c257e20159e9001cd8e28 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_1.conda#8d7242302bb3d03b9a690b6dda872603 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py310h2372a71_0.conda#c2dcff257e040bcda00e2a30a9d85333 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 
-https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 -https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py310h01dd4db_0.conda#95d87a906d88b5824d7d36eeef091dba https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py310hc6cd4ac_0.conda#68d5bfccaba2d89a7812098dd3966d9b 
https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310hb13e2d6_0.conda#ac3b67e928cc71548efad9b522d42fef +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda#d3147cfbf72d6ae7bba10562208f6def https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_4.conda#124211262afed349430d9a3de6b51e8f -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_5.conda#ef5333594a958b25912002886b82b253 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.4.0-pyhd8ed1ab_0.conda#b8dc6f9db1b9670e564b68277a79ffeb +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.31-pyhd8ed1ab_0.conda#fea10604a45e974b110ea15a88913ebc +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py310h2372a71_1.conda#dfcf64f67961eb9686676f96fdb4b4d1 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py310hcc13569_0.conda#30a39c1064e5efc578d83c2a5f7cd749 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py310hb13e2d6_1.conda#4260b359d8fbeab4f789a8b0f968079f -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310h7dcad9a_0.conda#0d7c35fe5cc1f436e368ddd500deb979 
+https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py310hb13e2d6_0.conda#f0063b2885bfae11324a00a693f88781 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310hc3e127f_1.conda#fdaca8d27b3af78d617521eb37b1d055 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.6-pyhd8ed1ab_0.conda#fb1fc875719e217ed799a7aae11d3be4 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_4.conda#0ca55ca20891d393846695354b32ebc5 https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py310h62c0568_0.conda#e650bd952e5618050ccb088bc0c6dfb4 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py310h62c0568_0.conda#3cbbc7d0b54df02c9a006d3de14911d9 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py310hba70d50_100.conda#e19392760c7e4da3b9cb0ee5bf61bc4b https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h1f7b6fc_1.conda#857b828a13cdddf568958f7575b25b22 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 
https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310hcc13569_1.conda#31ef447724fb19066a9d00a660dab1bd https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_5.conda#f4fe7a6e3d7c78c9de048ea9dda21690 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.1-py310hff52083_0.conda#acd62190c3822df888791592130aa286 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.3-pyhd8ed1ab_1.conda#fbe2993dd48f14724b90bf12e92cc164 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 diff --git a/requirements/locks/py311-linux-64.lock 
b/requirements/locks/py311-linux-64.lock index 96509aae97..d85b20ee07 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,13 +1,13 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: f2209792c838739771cbeb38eb5659da1f847d44387a829c931482c65e2f8885 +# input_hash: 51321f928e4883d91354d6e049004532b17675ee9629854a199b34e0854e0bf9 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 @@ -17,18 +17,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda# https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 -https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f 
https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 @@ -41,17 +38,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -61,7 +53,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 @@ -70,22 +61,16 @@ https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_6.conda#80bf3b277c120dd294b51d404b931a75 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe 
https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -96,21 +81,12 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f -https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 
+https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 @@ -118,49 +94,42 @@ https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_1.conda#cce9e7c3f1c307f2a5fb08a2922d6164 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a 
https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.5-py311hb755f60_0.conda#25b42509a68f96e612534af3fe2cf033 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py311hb755f60_0.conda#88cc84238dda72e11285d9cfcbe43e51 https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_1.conda#a50918d10114a0bf80fb46c7cc692058 
https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 -https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 
https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py311h9547e67_0.conda#e826b71bf3dc8c91ee097663e2bcface +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py311h9547e67_0.conda#3ac85c6c226e2a2e4b17864fc2ca88ff https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 @@ -180,9 +149,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 
https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 @@ -194,76 +161,65 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f9 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.3-py311h459d7ec_0.conda#a811af88d3c522cf36f4674ef699021d -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_1.conda#8d7242302bb3d03b9a690b6dda872603 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py311h459d7ec_0.conda#5b24692ece82f89e5cb9a469d9619731 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c 
https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 -https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py311ha6c5da5_0.conda#83a988daf5c49e57f7d2086fb6781fe8 https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py311hb755f60_0.conda#02336abab4cb5dd794010ef53c54bd09 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py311h64a7726_0.conda#bf16a9f625126e378302f08e7ed67517 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_4.conda#75d504c6787edc377ebdba087a26a61b -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_5.conda#e4d262cc3600e70b505a6761d29f6207 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.4.0-pyhd8ed1ab_0.conda#b8dc6f9db1b9670e564b68277a79ffeb +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b 
https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.31-pyhd8ed1ab_0.conda#fea10604a45e974b110ea15a88913ebc +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py311h459d7ec_1.conda#45b8d355bbcdd27588c2d266bcfdff84 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py311h320fe9a_0.conda#3ea3486e16d559dfcb539070ed330a1e https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 
-https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py311h64a7726_1.conda#e4b4d3b764e2d029477d0db88248a8b5 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311he06c224_0.conda#c90e2469d7512f3bba893533a82d7a02 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py311h64a7726_0.conda#9ac5334f1b5ed072d3dbc342503d7868 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311h2032efe_1.conda#4ba860ff851768615b1a25b788022750 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.6-pyhd8ed1ab_0.conda#fb1fc875719e217ed799a7aae11d3be4 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py311h54ef318_0.conda#201fdabdb86bb8fb6e99fa3f0dab8122 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py311h54ef318_0.conda#9f80753bc008bfc9b95f39d9ff9f1694 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py311he8ad708_100.conda#597b1ad6cb7011b7561c20ea30295cae https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 
https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_1.conda#10d1806e20da040c58c36deddf51c70c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_5.conda#ec7e45bc76d9d0b69a74a2075932b8e8 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.1-py311h38be061_0.conda#8a21cbbb87357c701fa44f4cfa4e23d7 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.3-pyhd8ed1ab_1.conda#fbe2993dd48f14724b90bf12e92cc164 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 4a7d83d4c7..fa5fa80250 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,13 +1,13 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 26c72df308ccfddf5aa1ad644bf5158095cf3032f3abe9322a6f1cdaab977a7c +# input_hash: f50dc073e5fb2216547509366957a7e99607a06a604840563bff4dd4b5daedcb @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 
https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_cp39.conda#bfe4b3259a8ac6cdf0037752904da6a7 @@ -17,18 +17,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda# https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 -https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff 
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 @@ -41,17 +38,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda#bdadff838d5437aea83607ced8b37f75 
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -61,7 +53,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 @@ -70,22 +61,16 @@ https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d -https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 
https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_6.conda#80bf3b277c120dd294b51d404b931a75 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -96,21 +81,12 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f -https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 -https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 
-https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 @@ -118,49 +94,42 @@ https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_1.conda#c48418c8b35f1d59ae9ae1174812b40a -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 
+https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.5-py39h3d6467e_0.conda#8a666e66408ec097bf7b6d44353d6294 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py39h3d6467e_0.conda#bfde3cf098e298b81d1c1cbc9c79ab59 https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d 
https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_1.conda#a50918d10114a0bf80fb46c7cc692058 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 
-https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.6-py39h7633fee_0.conda#e39816a8abd539079a9d0b3c9045b2cb +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py39h7633fee_0.conda#f668e146a2ed03a4e62ffbb98b3115fb https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda#c2e412b0f11e5983bcfc35d9beb91ecb https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 @@ -175,15 +144,12 @@ https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda#07c15d846a2e4d673da22cbd85fdb6d2 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 
https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 @@ -194,77 +160,66 @@ https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.3-py39hd1e30aa_0.conda#873fb1d81f9e9220d605c6b05a96544c -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_1.conda#8d7242302bb3d03b9a690b6dda872603 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py39hd1e30aa_0.conda#616bc0b442acefebdbe97c7b885d771e https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 -https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 
-https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda#eeaa413fddccecb2ab7f747bdb55b07f https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece -https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py39h3d6467e_0.conda#e667a3ab0df62c54e60e1843d2e6defb https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 
https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h474f0d3_0.conda#62f1d2e05327bf62728afa448f2a9261 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py39h474f0d3_0.conda#459a58eda3e74dd5e3d596c618e7f20a https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_4.conda#4b6e79000ec3a495f429b2c1092ed63b -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_5.conda#93aff412f3e49fdb43361c0215cbd72d -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.4.0-pyhd8ed1ab_0.conda#b8dc6f9db1b9670e564b68277a79ffeb +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 
-https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.31-pyhd8ed1ab_0.conda#fea10604a45e974b110ea15a88913ebc +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py39hd1e30aa_1.conda#ca63612907462c8e36edcc9bbacc253e -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_102.conda#487a1c19dd3eacfd055ad614e9acde87 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py39hddac248_0.conda#961b398d8c421a3752e26f01f2dcbdac https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py39h474f0d3_1.conda#55441724fedb3042d38ffa5220f00804 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h1bc45ef_0.conda#ca067895d22f8a0d38f225a95184858e +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py39h474f0d3_0.conda#4b401c1516417b4b14aa1249d2f7929d +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h6404dd3_1.conda#05623249055d99c51cde021b525611db https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.6-pyhd8ed1ab_0.conda#fb1fc875719e217ed799a7aae11d3be4 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_4.conda#81310d21bf9d91754c1220c585bb72d6 
https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py39he9076e7_0.conda#89615b866cb3b0d8ad4e2a11e2bcf9a0 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py39he9076e7_0.conda#6085411aa2f0b2b801d3b46e1d3b83c5 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py39h4282601_100.conda#d2809fbf0d8ae7b8ca92c456cb44a7d4 https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h44dd56e_1.conda#90c5165691fdcb5a9f43907e32ea48b4 -https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39hddac248_1.conda#8dd2eb1e7aa9a33a92a75bdcea3f0dd0 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_5.conda#e1f148e57d071b09187719df86f513c1 
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.1-py39hf3d152e_0.conda#f8b1cf66dbdbc9fe1a298a11fddcfb05 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.3-pyhd8ed1ab_1.conda#fbe2993dd48f14724b90bf12e92cc164 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda#b3788794f88c9512393032e448428261 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 diff --git a/requirements/py310.yml b/requirements/py310.yml index b01586aac9..ced05dd987 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -16,7 +16,7 @@ dependencies: - cftime >=1.5 - dask-core >=2022.9.0 - libnetcdf !=4.9.1 - - matplotlib >=3.5 + - matplotlib-base >=3.5 - netcdf4 - numpy >1.21, !=1.24.3 - python-xxhash diff --git a/requirements/py311.yml b/requirements/py311.yml index 286fe74a33..5f2b23850e 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -16,7 +16,7 @@ dependencies: - cftime >=1.5 - dask-core >=2022.9.0 - libnetcdf !=4.9.1 - - matplotlib >=3.5 + - matplotlib-base >=3.5 - netcdf4 - numpy >1.21, !=1.24.3 - python-xxhash diff --git a/requirements/py39.yml b/requirements/py39.yml index 
f534aef4f3..a5b32748e3 100644 --- a/requirements/py39.yml +++ b/requirements/py39.yml @@ -16,7 +16,7 @@ dependencies: - cftime >=1.5 - dask-core >=2022.9.0 - libnetcdf !=4.9.1 - - matplotlib >=3.5 + - matplotlib-base >=3.5 - netcdf4 - numpy >1.21, !=1.24.3 - python-xxhash From 52f5be1b772952aa27194fdd899cb2ea29052431 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:44:45 +0000 Subject: [PATCH 090/134] Bump conda-incubator/setup-miniconda from 2 to 3 (#5607) Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 2 to 3. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v2...v3) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-tests.yml | 2 +- .github/workflows/ci-wheels.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 7fe06ced30..fd2105c746 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -80,7 +80,7 @@ jobs: env_name: ${{ env.ENV_NAME }} - name: "conda install" - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniforge-version: latest channels: conda-forge,defaults diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index 942d528f6d..7bdebf79f9 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -82,7 +82,7 @@ jobs: env_name: ${{ env.ENV_NAME }} - name: "conda install" - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniforge-version: latest channels: conda-forge,defaults From 5c94b521c83cdbefd1ea8f50484b634873076c7b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:46:32 +0000 Subject: [PATCH 091/134] Bump actions/github-script from 6 to 7 (#5580) Bumps [actions/github-script](https://github.com/actions/github-script) from 6 to 7. - [Release notes](https://github.com/actions/github-script/releases) - [Commits](https://github.com/actions/github-script/compare/v6...v7) --- updated-dependencies: - dependency-name: actions/github-script dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/benchmarks_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index 365fc733a5..f6be9c2c5a 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -20,7 +20,7 @@ jobs: - name: Download artifact id: download-artifact # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#using-data-from-the-triggering-workflow - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: script: | let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ From b771e2257b30ae1d4a8fcaf4fa95cc97c7e5a06f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 16:25:00 +0000 Subject: [PATCH 092/134] Bump actions/checkout from 3 to 4 (#5460) Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/benchmarks_report.yml | 2 +- .github/workflows/benchmarks_run.yml | 2 +- .github/workflows/ci-tests.yml | 2 +- .github/workflows/ci-wheels.yml | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index f6be9c2c5a..cb5110dda5 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -65,7 +65,7 @@ jobs: if: needs.download.outputs.reports_exist == 1 steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download artifact uses: actions/download-artifact@v3 diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index 31b6499ecd..bcc18d62c4 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -36,7 +36,7 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index fd2105c746..2c2a083050 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -55,7 +55,7 @@ jobs: steps: - name: "checkout" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "environment configure" env: diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index 7bdebf79f9..450a18eb86 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -28,7 +28,7 @@ jobs: name: "build sdist & wheel" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -57,7 +57,7 @@ jobs: env: ENV_NAME: "ci-wheels" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: 
fetch-depth: 0 From 5496ebcee49294e0f5993991969dae99762f5f40 Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Thu, 30 Nov 2023 16:44:15 +0000 Subject: [PATCH 093/134] Fix pp save of realization coordinate (#5568) * Fix pp save of realization coordinate * Reduce lbrsvd to 4 elements Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * review actions * credit reviewer --------- Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 3 +++ lib/iris/fileformats/pp_save_rules.py | 14 +++++++++++--- lib/iris/tests/unit/fileformats/pp/test_save.py | 12 ++++++++++++ 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index de69eddd4e..4c6f06e13f 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -76,6 +76,9 @@ This document explains the changes made to Iris for this release #. `@acchamber`_ and `@rcomer`_ modified 2D plots so that time axes and their ticks have more sensible default labels. (:issue:`5426`, :pull:`5561`) +#. `@rcomer`_ and `@trexfeathers`_ (reviewer) added handling for realization + coordinates when saving pp files (:issue:`4747`, :pull:`5568`) + 💣 Incompatible Changes ======================= diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 7db21d5f99..9effba3c0a 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -614,7 +614,7 @@ def _non_std_cross_section_rules(cube, pp): def _lbproc_rules(cube, pp): """ - Rules for setting the horizontal grid and pole location of the PP field. + Rules for setting the processing code of the PP field. Note: `pp.lbproc` must be set to 0 before these rules are run. 
@@ -844,7 +844,10 @@ def _vertical_rules(cube, pp): def _all_other_rules(cube, pp): """ - Rules for setting the horizontal grid and pole location of the PP field. + Fields currently managed by these rules: + + * lbfc (field code) + * lbrsvd[3] (ensemble member number) Args: cube: the cube being saved as a series of PP fields. @@ -859,13 +862,18 @@ def _all_other_rules(cube, pp): if check_items in CF_TO_LBFC: pp.lbfc = CF_TO_LBFC[check_items] - # Set STASH code. + # Set field code. if ( "STASH" in cube.attributes and str(cube.attributes["STASH"]) in STASH_TRANS ): pp.lbfc = STASH_TRANS[str(cube.attributes["STASH"])].field_code + # Set ensemble member number. + real_coord = scalar_coord(cube, "realization") + if real_coord is not None: + pp.lbrsvd[3] = real_coord.points[0] + return pp diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index fc0535f428..f49d389841 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -43,6 +43,18 @@ def test_grid_and_pole__scalar_dim_longitude(unit, modulus): assert field.lbnpt == lon.points.size +def test_realization(): + cube = stock.lat_lon_cube() + real_coord = DimCoord(42, standard_name="realization", units=1) + cube.add_aux_coord(real_coord) + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: + pp_field.lbrsvd = list(range(4)) + verify(cube, pp_field) + member_number = pp_field.lbrsvd[3] + + assert member_number == 42 + + def _pp_save_ppfield_values(cube): """ Emulate saving a cube as PP, and capture the resulting PP field values. From 54ad9bef8bb3f6460fddeaef27fe8f3852bea3c2 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Fri, 1 Dec 2023 10:09:09 +0000 Subject: [PATCH 094/134] Relocated the Technical Papers documentation to Further Topics. 
(#5602) * moved technical papers to further topics * updated old whatsnew * removed line 64 * created further topics index page * swapped numref for ref * fixed further topics path * unfixed further topics path * moved to bottom of userguide --- .../contributing_getting_involved.rst | 1 - docs/src/further_topics/index.rst | 20 ++++++++++++++ docs/src/further_topics/lenient_maths.rst | 4 +-- docs/src/further_topics/lenient_metadata.rst | 10 +++---- docs/src/further_topics/metadata.rst | 26 +++++++++---------- .../missing_data_handling.rst | 0 .../netcdf_io.rst | 0 docs/src/further_topics/ugrid/data_model.rst | 12 ++++----- .../um_files_loading.rst | 4 +-- docs/src/techpapers/index.rst | 14 ---------- docs/src/userguide/index.rst | 13 +--------- docs/src/whatsnew/1.7.rst | 4 +-- 12 files changed, 51 insertions(+), 57 deletions(-) create mode 100644 docs/src/further_topics/index.rst rename docs/src/{techpapers => further_topics}/missing_data_handling.rst (100%) rename docs/src/{techpapers => further_topics}/netcdf_io.rst (100%) rename docs/src/{techpapers => further_topics}/um_files_loading.rst (100%) delete mode 100644 docs/src/techpapers/index.rst diff --git a/docs/src/developers_guide/contributing_getting_involved.rst b/docs/src/developers_guide/contributing_getting_involved.rst index 9ec6559114..6ade098b6b 100644 --- a/docs/src/developers_guide/contributing_getting_involved.rst +++ b/docs/src/developers_guide/contributing_getting_involved.rst @@ -61,6 +61,5 @@ If you are new to using GitHub we recommend reading the ../generated/api/iris ../whatsnew/index - ../techpapers/index ../copyright ../voted_issues diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst new file mode 100644 index 0000000000..016d9f80ea --- /dev/null +++ b/docs/src/further_topics/index.rst @@ -0,0 +1,20 @@ +.. _further_topics_index: + + +Further Topics +=============== + +Extra information on specific technical issues. + +.. 
toctree:: + :maxdepth: 1 + + filtering_warnings + metadata + lenient_metadata + lenient_maths + um_files_loading + missing_data_handling + netcdf_io + dask_best_practices/index + ugrid/index \ No newline at end of file diff --git a/docs/src/further_topics/lenient_maths.rst b/docs/src/further_topics/lenient_maths.rst index 818efe4763..51f77fb956 100644 --- a/docs/src/further_topics/lenient_maths.rst +++ b/docs/src/further_topics/lenient_maths.rst @@ -35,9 +35,9 @@ introduced and discussed the concept of lenient metadata; a more pragmatic and forgiving approach to :ref:`comparing `, :ref:`combining ` and understanding the :ref:`differences ` between your metadata -(:numref:`metadata members table`). The lenient metadata philosophy introduced +(:ref:`metadata members table`). The lenient metadata philosophy introduced there is extended to cube maths, with the view to also preserving as much common -coordinate (:numref:`metadata classes table`) information, as well as common +coordinate (:ref:`metadata classes table`) information, as well as common metadata, between the participating :class:`~iris.cube.Cube` operands as possible. Let's consolidate our understanding of lenient and strict cube maths through diff --git a/docs/src/further_topics/lenient_metadata.rst b/docs/src/further_topics/lenient_metadata.rst index b68ed501ba..5de9ad70c4 100644 --- a/docs/src/further_topics/lenient_metadata.rst +++ b/docs/src/further_topics/lenient_metadata.rst @@ -17,10 +17,10 @@ and also :ref:`conversion `. The common metadata API is implemented through the ``metadata`` property on each of the Iris `CF Conventions`_ class containers -(:numref:`metadata classes table`), and provides a common gateway for users to +(:ref:`metadata classes table`), and provides a common gateway for users to easily manage and manipulate their metadata in a consistent and unified way. 
-This is primarily all thanks to the metadata classes (:numref:`metadata classes table`) +This is primarily all thanks to the metadata classes (:ref:`metadata classes table`) that support the necessary state and behaviour required by the common metadata API. Namely, it is the ``equal`` (``__eq__``), ``difference`` and ``combine`` methods that provide this rich metadata behaviour, all of which are explored @@ -267,7 +267,7 @@ Now, compare our metadata, >>> metadata.equal(latitude.metadata, lenient=True) True -Again, lenient equality (:numref:`lenient equality table`) offers a more +Again, lenient equality (:ref:`lenient equality table`) offers a more forgiving and practical alternative to strict behaviour. @@ -277,7 +277,7 @@ Lenient Difference ------------------ Similar to :ref:`lenient equality`, the lenient ``difference`` method -(:numref:`lenient difference table`) considers there to be no difference between +(:ref:`lenient difference table`) considers there to be no difference between comparing **something** with **nothing** (``None``). This working assumption is not naively applied to all metadata members, but rather a more pragmatic approach is adopted, as discussed later in :ref:`lenient members`. @@ -334,7 +334,7 @@ Lenient Combination ------------------- The behaviour of the lenient ``combine`` metadata class method is outlined -in :numref:`lenient combine table`, and as with :ref:`lenient equality` and +in :ref:`lenient combine table`, and as with :ref:`lenient equality` and :ref:`lenient difference` is enabled through the ``lenient`` keyword argument. The difference in behaviour between **lenient** and diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 10efcdf7fe..6d32b10b7a 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -52,9 +52,9 @@ give them meaning. 
The **metadata** used to define an Iris `CF Conventions`_ class is composed of individual **metadata members**, almost all of which reference specific `CF Conventions`_ terms. The individual metadata members used to define each of -the Iris `CF Conventions`_ classes are shown in :numref:`metadata members table`. +the Iris `CF Conventions`_ classes are shown in :ref:`metadata members table`. -As :numref:`metadata members table` highlights, **specific** metadata is used to +As :ref:`metadata members table` highlights, **specific** metadata is used to define and represent each Iris `CF Conventions`_ class. This means that metadata alone, can be used to easily **identify**, **compare** and **differentiate** between individual class instances. @@ -111,7 +111,7 @@ Common Metadata API cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc")) As of Iris ``3.0.0``, a unified treatment of metadata has been applied -across each Iris class (:numref:`metadata members table`) to allow users +across each Iris class (:ref:`metadata members table`) to allow users to easily manage and manipulate their metadata in a consistent way. This is achieved through the ``metadata`` property, which allows you to @@ -158,7 +158,7 @@ Or use the ``metadata`` property again, but this time on the ``forecast_period`` CoordMetadata(standard_name='forecast_period', long_name=None, var_name='forecast_period', units=Unit('hours'), attributes={}, coord_system=None, climatological=False) Note that, the ``metadata`` property is available on each of the Iris `CF Conventions`_ -class containers referenced in :numref:`metadata members table`, and thus provides +class containers referenced in :ref:`metadata members table`, and thus provides a **common** and **consistent** approach to managing your metadata, which we'll now explore a little more fully. 
@@ -168,7 +168,7 @@ Metadata Classes The ``metadata`` property will return an appropriate `namedtuple`_ metadata class for each Iris `CF Conventions`_ class container. The metadata class returned by -each container class is shown in :numref:`metadata classes table` below, +each container class is shown in :ref:`metadata classes table` below, .. _metadata classes table: .. table:: - Iris namedtuple metadata classes @@ -187,7 +187,7 @@ each container class is shown in :numref:`metadata classes table` below, ========================================== ======================================================== Akin to the behaviour of a `namedtuple`_, the metadata classes in -:numref:`metadata classes table` create **tuple-like** instances i.e., they provide a +:ref:`metadata classes table` create **tuple-like** instances i.e., they provide a **snapshot** of the associated metadata member **values**, which are **not settable**, but they **may be mutable** depending on the data-type of the member. For example, given the following ``metadata`` of a :class:`~iris.coords.DimCoord`, @@ -243,13 +243,13 @@ with a **snapshot** of the container class metadata values at that point in time Skip ahead to :ref:`metadata assignment ` for a fuller discussion on options how to **set** and **get** metadata on the instance of -an Iris `CF Conventions`_ container class (:numref:`metadata classes table`). +an Iris `CF Conventions`_ container class (:ref:`metadata classes table`). Metadata Class Behaviour ------------------------ -As mentioned previously, the metadata classes in :numref:`metadata classes table` +As mentioned previously, the metadata classes in :ref:`metadata classes table` inherit the behaviour of a `namedtuple`_, and so act and feel like a `namedtuple`_, just as you might expect. 
For example, given the following ``metadata``, @@ -326,7 +326,7 @@ Richer Metadata Behaviour cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc")) longitude = cube.coord("longitude") -The metadata classes from :numref:`metadata classes table` support additional +The metadata classes from :ref:`metadata classes table` support additional behaviour above and beyond that of the standard Python `namedtuple`_, which allows you to easily **compare**, **combine**, **convert** and understand the **difference** between your ``metadata`` instances. @@ -340,7 +340,7 @@ Metadata Equality The metadata classes support both **equality** (``__eq__``) and **inequality** (``__ne__``), but no other `rich comparison`_ operators are implemented. This is simply because there is no obvious ordering to any collective of metadata -members, as defined in :numref:`metadata members table`. +members, as defined in :ref:`metadata members table`. For example, given the following :class:`~iris.coords.DimCoord`, @@ -455,7 +455,7 @@ be ``False``, The reason different metadata classes cannot be compared is simply because each metadata class contains **different** members, as shown in -:numref:`metadata members table`. However, there is an exception to the rule... +:ref:`metadata members table`. However, there is an exception to the rule... .. 
_exception rule: @@ -834,7 +834,7 @@ using ``from_metadata``, >>> print(newmeta) DimCoordMetadata(standard_name=air_temperature, var_name=air_temperature, units=K, attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}) -By examining :numref:`metadata members table`, we can see that the +By examining :ref:`metadata members table`, we can see that the :class:`~iris.cube.Cube` and :class:`~iris.coords.DimCoord` container classes share the following common metadata members, @@ -880,7 +880,7 @@ Metadata Assignment latitude = cube.coord("latitude") The ``metadata`` property available on each Iris `CF Conventions`_ container -class (:numref:`metadata classes table`) can not only be used **to get** +class (:ref:`metadata classes table`) can not only be used **to get** the metadata of an instance, but also **to set** the metadata on an instance. For example, given the following :class:`~iris.common.metadata.DimCoordMetadata` of the diff --git a/docs/src/techpapers/missing_data_handling.rst b/docs/src/further_topics/missing_data_handling.rst similarity index 100% rename from docs/src/techpapers/missing_data_handling.rst rename to docs/src/further_topics/missing_data_handling.rst diff --git a/docs/src/techpapers/netcdf_io.rst b/docs/src/further_topics/netcdf_io.rst similarity index 100% rename from docs/src/techpapers/netcdf_io.rst rename to docs/src/further_topics/netcdf_io.rst diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst index 208254ada6..0b4334e0f0 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -46,7 +46,7 @@ Structured Grids (the old world) Assigning data to locations using a structured grid is essentially an act of matching coordinate arrays to each dimension of the data array. 
The data can also be represented as an area (instead of a point) by including a bounds array -for each coordinate array. :numref:`data_structured_grid` visualises an +for each coordinate array. :ref:`data_structured_grid` visualises an example. .. _data_structured_grid: @@ -125,7 +125,7 @@ datum per element, matched to its element by matching the datum index with the coordinate or connectivity index along the **unstructured dimension**. So for an example data array called ``foo``: ``foo[3]`` would be at position ``(x[3], y[3])`` if it were node-located, or at -``faces[3]`` if it were face-located. :numref:`data_ugrid_mesh` visualises an +``faces[3]`` if it were face-located. :ref:`data_ugrid_mesh` visualises an example of what is described above. .. _data_ugrid_mesh: @@ -152,7 +152,7 @@ example of what is described above. The mesh model also supports edges/faces/volumes having associated 'centre' coordinates - to allow point data to be assigned to these elements. 'Centre' is just a convenience term - the points can exist anywhere within their respective -elements. See :numref:`ugrid_element_centres` for a visualised example. +elements. See :ref:`ugrid_element_centres` for a visualised example. .. _ugrid_element_centres: .. figure:: images/ugrid_element_centres.svg @@ -175,7 +175,7 @@ Above we have seen how one could replicate data on a structured grid using a mesh instead. But the utility of a mesh is the extra flexibility it offers. Here are the main examples: -Every node is completely independent - every one can have unique X andY (and Z) coordinate values. See :numref:`ugrid_node_independence`. +Every node is completely independent - every one can have unique X andY (and Z) coordinate values. See :ref:`ugrid_node_independence`. .. _ugrid_node_independence: .. figure:: images/ugrid_node_independence.svg @@ -194,7 +194,7 @@ Every node is completely independent - every one can have unique X andY (and Z) Faces and volumes can have variable node counts, i.e. 
different numbers of sides. This is achieved by masking the unused 'slots' in the connectivity -array. See :numref:`ugrid_variable_faces`. +array. See :ref:`ugrid_variable_faces`. .. _ugrid_variable_faces: .. figure:: images/ugrid_variable_faces.svg @@ -211,7 +211,7 @@ array. See :numref:`ugrid_variable_faces`. (black circles) for faces with fewer nodes than the maximum. Data can be assigned to lines (edges) just as easily as points (nodes) or -areas (faces). See :numref:`ugrid_edge_data`. +areas (faces). See :ref:`ugrid_edge_data`. .. _ugrid_edge_data: .. figure:: images/ugrid_edge_data.svg diff --git a/docs/src/techpapers/um_files_loading.rst b/docs/src/further_topics/um_files_loading.rst similarity index 100% rename from docs/src/techpapers/um_files_loading.rst rename to docs/src/further_topics/um_files_loading.rst index f94898b3aa..9d9393f16d 100644 --- a/docs/src/techpapers/um_files_loading.rst +++ b/docs/src/further_topics/um_files_loading.rst @@ -1,5 +1,3 @@ -.. _um_files_loading: - .. testsetup:: import numpy as np @@ -13,6 +11,8 @@ np.set_printoptions(precision=8) +.. _um_files_loading: + =================================== Iris Handling of PP and Fieldsfiles =================================== diff --git a/docs/src/techpapers/index.rst b/docs/src/techpapers/index.rst deleted file mode 100644 index e97a87f39c..0000000000 --- a/docs/src/techpapers/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _techpapers_index: - - -Iris Technical Papers -===================== - -Extra information on specific technical issues. - -.. toctree:: - :maxdepth: 1 - - um_files_loading.rst - missing_data_handling.rst - netcdf_io.rst diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index c87323da8e..8b7ae20cba 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -36,15 +36,4 @@ they may serve as a useful reference for future exploration. citation code_maintenance glossary - - -.. 
toctree:: - :maxdepth: 2 - :caption: Further Topics - - ../further_topics/filtering_warnings - ../further_topics/metadata - ../further_topics/lenient_metadata - ../further_topics/lenient_maths - ../further_topics/dask_best_practices/index - ../further_topics/ugrid/index + ../further_topics/index diff --git a/docs/src/whatsnew/1.7.rst b/docs/src/whatsnew/1.7.rst index 1d7c7c3f60..4c3f3197dc 100644 --- a/docs/src/whatsnew/1.7.rst +++ b/docs/src/whatsnew/1.7.rst @@ -329,6 +329,6 @@ Documentation * A clarification of the behaviour of :func:`iris.analysis.calculus.differentiate`. -* A new :doc:`"Technical Papers" ` section has been added to +* A new Technical Papers section has been added to the documentation along with the addition of a paper providing an - :doc:`overview of the load process for UM-like fileformats (e.g. PP and Fieldsfile) `. + :ref:`overview of the load process for UM-like fileformats (e.g. PP and Fieldsfile) `. From eafefdbbd90d107a6ca764820895cc75e4d2fa60 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:03:26 +0000 Subject: [PATCH 095/134] Feedstock rc branch management in do-nothing script (#5515) * Feedstock rc branch management in do-nothing script. * Correct release_do_nothing cross reference. * More nuance for the rc branch archiving process. --- docs/src/whatsnew/latest.rst | 9 ++++-- tools/release_do_nothing.py | 58 +++++++++++++++++++++++++++++++++++- 2 files changed, 64 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 4c6f06e13f..884bfd376a 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -39,7 +39,7 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) - + #. 
`@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' :class:`UserWarning`\s for richer filtering. The full index of sub-categories can be seen here: :mod:`iris.exceptions` . (:pull:`5498`) @@ -54,7 +54,7 @@ This document explains the changes made to Iris for this release Winter - December to February) will be assigned to the preceding year (e.g. the year of December) instead of the following year (the default behaviour). (:pull:`5573`) - + #. `@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a coordinate. (:pull:`5551`) @@ -161,6 +161,11 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ reduced the size of the conda environment used for testing. (:pull:`5606`) +#. `@trexfeathers`_ and `@pp-mo`_ improved how the conda-forge feedstock + release candidate branch is managed, via: + :doc:`../developers_guide/release_do_nothing`. + (:pull:`5515`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index bd38f0f733..94f2d96829 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -530,7 +530,63 @@ def update_conda_forge( _wait_for_done(message) if is_release_candidate: - upstream_branch = "release-candidate" + message = ( + "Visit the conda-forge feedstock branches page:\n" + "https://github.com/conda-forge/iris-feedstock/branches" + ) + _wait_for_done(message) + + message = ( + "Find the release candidate branch - " + "`rc`/`release-candidate`/similar.\n" + ) + rc_branch = _get_input( + message, + "Input the name of the release candidate branch" + ) + + message = ( + f"Is the latest commit on {rc_branch} over 1 month ago?" 
+ ) + archive_rc = None + while archive_rc is None: + age_check = _get_input(message, "y / n") + if age_check.casefold() == "y".casefold(): + archive_rc = True + elif age_check.casefold() == "n".casefold(): + archive_rc = False + else: + _report_problem("Invalid entry. Please try again ...") + + if archive_rc: + # We chose this odd handling of release candidate branches because + # a persistent branch will gradually diverge as `main` receives + # automatic and manual maintenance (where recreating these on + # another branch is often beyond Iris dev expertise). Advised + # practice from conda-forge is also liable to evolve over time. + # Since there is no benefit to a continuous Git history on the + # release candidate branch, the simplest way to keep it aligned + # with best practice is to regularly create a fresh branch from + # `main`. + + date_string = datetime.today().strftime("%Y%m%d") + message = ( + f"Archive the {rc_branch} branch by appending _{date_string} " + "to its name.\n" + f"e.g. rc_{date_string}\n\n" + f"({__file__} includes an explanation of this in the comments)." + ) + _wait_for_done(message) + + message = ( + "Follow the latest conda-forge guidance for creating a new " + "release candidate branch from the `main` branch:\n" + "https://conda-forge.org/docs/maintainer/knowledge_base.html#pre-release-builds\n\n" + "Config file(s) should point to the `rc_iris` label.\n" + ) + rc_branch = _get_input(message, "Input the name of your new branch") + + upstream_branch = rc_branch else: upstream_branch = "main" From 40cb3d1c7a83fffcce644e6a181fc98db4236f6b Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 4 Dec 2023 10:02:30 +0000 Subject: [PATCH 096/134] add links to scitools-classroom repo. 
(#5609) --- docs/src/userguide/index.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index 8b7ae20cba..d986a986ad 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -18,6 +18,12 @@ they may serve as a useful reference for future exploration. sequentially using the ``next`` and ``previous`` links at the bottom of each page. +.. note:: + + There is also useful learning material held in the + https://github.com/scitools-classroom repo, including tutorials, courses + and presentations. + .. toctree:: :maxdepth: 2 From db43fde0534af0ee32c77f7747ab506da5552c1d Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 4 Dec 2023 16:17:55 +0000 Subject: [PATCH 097/134] DOCS: Numpydocs1 (#5578) * baseline * added some noqa. * api contents ordering to aplhabetical * remove duplicate note * updated string to str for rendering in docs * ensured spaced around colon for listed parameters. --- docs/src/conf.py | 3 +- lib/iris/config.py | 51 +- lib/iris/fileformats/netcdf/__init__.py | 3 +- lib/iris/fileformats/netcdf/_dask_locks.py | 80 ++- .../fileformats/netcdf/_thread_safe_nc.py | 104 +-- lib/iris/fileformats/netcdf/loader.py | 37 +- lib/iris/fileformats/netcdf/saver.py | 604 +++++++++--------- lib/iris/io/__init__.py | 142 ++-- lib/iris/io/format_picker.py | 130 ++-- lib/iris/time.py | 53 +- 10 files changed, 634 insertions(+), 573 deletions(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index e349000862..c59aca4909 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -15,7 +15,6 @@ # # All configuration values have a default; values that are commented out # serve to show the default. 
- # ---------------------------------------------------------------------------- import datetime @@ -195,7 +194,7 @@ def _dotv(version): todo_include_todos = True # api generation configuration -autodoc_member_order = "groupwise" +autodoc_member_order = "alphabetical" autodoc_default_flags = ["show-inheritance"] # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints diff --git a/lib/iris/config.py b/lib/iris/config.py index c1d1de5793..22fb93a06a 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -27,6 +27,7 @@ The [optional] name of the logger to notify when first imported. ---------- + """ import configparser @@ -42,41 +43,37 @@ def get_logger( name, datefmt=None, fmt=None, level=None, propagate=None, handler=True ): """ + Create a custom class for logging. + Create a :class:`logging.Logger` with a :class:`logging.StreamHandler` and custom :class:`logging.Formatter`. - Args: - - * name: + Parameters + ---------- + name The name of the logger. Typically this is the module filename that owns the logger. - - Kwargs: - - * datefmt: + datefmt: optional The date format string of the :class:`logging.Formatter`. Defaults to ``%d-%m-%Y %H:%M:%S``. - - * fmt: + fmt: optional The additional format string of the :class:`logging.Formatter`. This is appended to the default format string ``%(asctime)s %(name)s %(levelname)s - %(message)s``. - - * level: + level: optional The threshold level of the logger. Defaults to ``INFO``. - - * propagate: + propagate: optional Sets the ``propagate`` attribute of the :class:`logging.Logger`, which determines whether events logged to this logger will be passed to the handlers of higher level loggers. Defaults to ``False``. - - * handler: + handler: optional Create and attach a :class:`logging.StreamHandler` to the logger. Defaults to ``True``. - Returns: - A :class:`logging.Logger`. + Returns + ------- + :class:`logging.Logger`. 
""" if level is None: @@ -118,6 +115,8 @@ def get_logger( # Returns simple string options def get_option(section, option, default=None): """ + Return the option value for the given section. + Returns the option value for the given section, or the default value if the section/option is not present. @@ -131,6 +130,8 @@ def get_option(section, option, default=None): # Returns directory path options def get_dir_option(section, option, default=None): """ + Return the directory path from the given option and section. + Returns the directory path from the given option and section, or returns the given default value if the section/option is not present or does not represent a valid directory. @@ -196,20 +197,19 @@ def __init__(self, conventions_override=None): """ Set up NetCDF processing options for Iris. - Currently accepted kwargs: - - * conventions_override (bool): + Parameters + ---------- + conventions_override : bool, optional Define whether the CF Conventions version (e.g. `CF-1.6`) set when saving a cube to a NetCDF file should be defined by - Iris (the default) or the cube being saved. - - If `False` (the default), specifies that Iris should set the + Iris (the default) or the cube being saved. If `False` + (the default), specifies that Iris should set the CF Conventions version when saving cubes as NetCDF files. If `True`, specifies that the cubes being saved to NetCDF should set the CF Conventions version for the saved NetCDF files. - Example usages: - + Examples + -------- * Specify, for the lifetime of the session, that we want all cubes written to NetCDF to define their own CF Conventions versions:: @@ -276,6 +276,7 @@ def _defaults_dict(self): def context(self, **kwargs): """ Allow temporary modification of the options via a context manager. + Accepted kwargs are the same as can be supplied to the Option. 
""" diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index 99817c5921..cf550fbb57 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """ -Module to support the loading and saving of NetCDF files, also using the CF conventions -for metadata interpretation. +Support loading and saving NetCDF files using CF conventions for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py index b7727a1ab7..82edbf202e 100644 --- a/lib/iris/fileformats/netcdf/_dask_locks.py +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -5,45 +5,49 @@ """ Module containing code to create locks enabling dask workers to co-operate. -This matter is complicated by needing different solutions for different dask scheduler -types, i.e. local 'threads' scheduler, local 'processes' or distributed. +This matter is complicated by needing different solutions for different dask +scheduler types, i.e. local 'threads' scheduler, local 'processes' or +distributed. -In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset -targeting an output file, and creates a Saver.file_write_lock object to serialise -write-accesses to the file from dask tasks : All dask-task file writes go via a -"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link -to the Saver.file_write_lock, and uses it to prevent workers from fouling each other. 
+In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a +netCDF4.Dataset targeting an output file, and creates a Saver.file_write_lock +object to serialise write-accesses to the file from dask tasks : All dask-task +file writes go via a "iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, +which also contains a link to the Saver.file_write_lock, and uses it to prevent +workers from fouling each other. For each chunk written, the NetCDFWriteProxy acquires the common per-file lock; -opens a Dataset on the file; performs a write to the relevant variable; closes the -Dataset and then releases the lock. This process is obviously very similar to what the -NetCDFDataProxy does for reading lazy chunks. +opens a Dataset on the file; performs a write to the relevant variable; closes +the Dataset and then releases the lock. This process is obviously very similar +to what the NetCDFDataProxy does for reading lazy chunks. -For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The workers -(threads) execute tasks which contain a NetCDFWriteProxy, as above. All of those -contain the common lock, and this is simply **the same object** for all workers, since -they share an address space. +For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The +workers (threads) execute tasks which contain a NetCDFWriteProxy, as above. +All of those contain the common lock, and this is simply **the same object** +for all workers, since they share an address space. For a distributed scheduler, the Saver.lock is a `distributed.Lock()` which is identified with the output filepath. This is distributed to the workers by -serialising the task function arguments, which will include the NetCDFWriteProxy. -A worker behaves like a process, though it may execute on a remote machine. When a -distributed.Lock is deserialised to reconstruct the worker task, this creates an object -that communicates with the scheduler. 
These objects behave as a single common lock, -as they all have the same string 'identity', so the scheduler implements inter-process -communication so that they can mutually exclude each other. +serialising the task function arguments, which will include the +NetCDFWriteProxy. A worker behaves like a process, though it may execute on a +remote machine. When a distributed.Lock is deserialised to reconstruct the +worker task, this creates an object that communicates with the scheduler. +These objects behave as a single common lock, as they all have the same string +'identity', so the scheduler implements inter-process communication so that +they can mutually exclude each other. It is also *conceivable* that multiple processes could write to the same file in -parallel, if the operating system supports it. However, this also requires that the -libnetcdf C library is built with parallel access option, which is not common. -With the "ordinary" libnetcdf build, a process which attempts to open for writing a file -which is _already_ open for writing simply raises an access error. -In any case, Iris netcdf saver will not support this mode of operation, at present. +parallel, if the operating system supports it. However, this also requires +that the libnetcdf C library is built with parallel access option, which is +not common. With the "ordinary" libnetcdf build, a process which attempts to +open for writing a file which is _already_ open for writing simply raises an +access error. In any case, Iris netcdf saver will not support this mode of +operation, at present. We don't currently support a local "processes" type scheduler. If we did, the -behaviour should be very similar to a distributed scheduler. It would need to use some -other serialisable shared-lock solution in place of 'distributed.Lock', which requires -a distributed scheduler to function. +behaviour should be very similar to a distributed scheduler. 
It would need to +use some other serialisable shared-lock solution in place of +'distributed.Lock', which requires a distributed scheduler to function. """ import threading @@ -55,7 +59,7 @@ # A dedicated error class, allowing filtering and testing of errors raised here. -class DaskSchedulerTypeError(ValueError): +class DaskSchedulerTypeError(ValueError): # noqa: D101 pass @@ -82,11 +86,13 @@ def get_dask_array_scheduler_type(): Returns one of 'distributed', 'threads' or 'processes'. The return value is a valid argument for dask.config.set(scheduler=). - This cannot distinguish between distributed local and remote clusters -- both of - those simply return 'distributed'. + This cannot distinguish between distributed local and remote clusters -- + both of those simply return 'distributed'. - NOTE: this takes account of how dask is *currently* configured. It will be wrong - if the config changes before the compute actually occurs. + Notes + ----- + This takes account of how dask is *currently* configured. It will + be wrong if the config changes before the compute actually occurs. """ if dask_scheduler_is_distributed(): @@ -114,8 +120,12 @@ def get_worker_lock(identity: str): """ Return a mutex Lock which can be shared by multiple Dask workers. - The type of Lock generated depends on the dask scheduler type, which must therefore - be set up before this is called. + The type of Lock generated depends on the dask scheduler type, which must + therefore be set up before this is called. 
+ + Parameters + ---------- + identity : str """ scheduler_type = get_dask_array_scheduler_type() diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index b5226b8e42..5abffb896f 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -24,7 +24,10 @@ class _ThreadSafeWrapper(ABC): """ - Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK. + Contains a netCDF4 class instance, ensuring wrapping all API calls. + + Contains a netCDF4 class instance, ensuring wrapping all API calls within + _GLOBAL_NETCDF4_LOCK. Designed to 'gate keep' all the instance's API calls, but allowing the same API as if working directly with the instance itself. @@ -117,7 +120,7 @@ class VariableWrapper(_ThreadSafeWrapper): def setncattr(self, *args, **kwargs) -> None: """ - Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. + Call netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. Only defined explicitly in order to get some mocks to work. """ @@ -141,11 +144,12 @@ def dimensions(self) -> typing.List[str]: def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: """ - Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. + Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK. - The original returned netCDF4.Dimensions are simply replaced with their - respective DimensionWrappers, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. + Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, + returning DimensionWrappers. The original returned netCDF4.Dimensions + are simply replaced with their respective DimensionWrappers, ensuring + that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. 
""" with _GLOBAL_NETCDF4_LOCK: dimensions_ = list( @@ -171,11 +175,12 @@ class GroupWrapper(_ThreadSafeWrapper): @property def dimensions(self) -> typing.Dict[str, DimensionWrapper]: """ - Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. + Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. - The original returned netCDF4.Dimensions are simply replaced with their - respective DimensionWrappers, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. + Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, + returning DimensionWrappers. The original returned netCDF4.Dimensions + are simply replaced with their respective DimensionWrappers, ensuring + that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: dimensions_ = self._contained_instance.dimensions @@ -186,11 +191,13 @@ def dimensions(self) -> typing.Dict[str, DimensionWrapper]: def createDimension(self, *args, **kwargs) -> DimensionWrapper: """ - Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. + Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. - The original returned netCDF4.Dimension is simply replaced with its - respective DimensionWrapper, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. + Call createDimension() from netCDF4.Group/Dataset within + _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. The original returned + netCDF4.Dimension is simply replaced with its respective + DimensionWrapper, ensuring that downstream calls are also performed + within _GLOBAL_NETCDF4_LOCK. 
""" with _GLOBAL_NETCDF4_LOCK: new_dimension = self._contained_instance.createDimension( @@ -204,11 +211,12 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper: @property def variables(self) -> typing.Dict[str, VariableWrapper]: """ - Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. - The original returned netCDF4.Variables are simply replaced with their - respective VariableWrappers, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. + Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, + returning VariableWrappers. The original returned netCDF4.Variables + are simply replaced with their respective VariableWrappers, ensuring + that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: variables_ = self._contained_instance.variables @@ -218,11 +226,13 @@ def variables(self) -> typing.Dict[str, VariableWrapper]: def createVariable(self, *args, **kwargs) -> VariableWrapper: """ - Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. + Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. - The original returned netCDF4.Variable is simply replaced with its - respective VariableWrapper, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. + Call createVariable() from netCDF4.Group/Dataset within + _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. The original + returned netCDF4.Variable is simply replaced with its respective + VariableWrapper, ensuring that downstream calls are also performed + within _GLOBAL_NETCDF4_LOCK. 
""" with _GLOBAL_NETCDF4_LOCK: new_variable = self._contained_instance.createVariable( @@ -234,7 +244,10 @@ def get_variables_by_attributes( self, *args, **kwargs ) -> typing.List[VariableWrapper]: """ - Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + Call get_variables_by_attributes() from netCDF4.Group/Dataset. + + Call get_variables_by_attributes() from netCDF4.Group/Dataset + within_GLOBAL_NETCDF4_LOCK, returning VariableWrappers. The original returned netCDF4.Variables are simply replaced with their respective VariableWrappers, ensuring that downstream calls are @@ -254,7 +267,10 @@ def get_variables_by_attributes( @property def groups(self): """ - Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers. + Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + + Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, + returning GroupWrappers. The original returned netCDF4.Groups are simply replaced with their respective GroupWrappers, ensuring that downstream calls are @@ -267,7 +283,10 @@ def groups(self): @property def parent(self): """ - Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper. + Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + + Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, + returning a GroupWrapper. The original returned netCDF4.Group is simply replaced with its respective GroupWrapper, ensuring that downstream calls are @@ -279,11 +298,13 @@ def parent(self): def createGroup(self, *args, **kwargs): """ - Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. + Call createGroup() from netCDF4.Group/Dataset. - The original returned netCDF4.Group is simply replaced with its - respective GroupWrapper, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. 
+ Call createGroup() from netCDF4.Group/Dataset within + _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. The original returned + netCDF4.Group is simply replaced with its respective GroupWrapper, + ensuring that downstream calls are also performed within + _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: new_group = self._contained_instance.createGroup(*args, **kwargs) @@ -304,11 +325,12 @@ class DatasetWrapper(GroupWrapper): @classmethod def fromcdl(cls, *args, **kwargs): """ - Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper. + Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK. - The original returned netCDF4.Dataset is simply replaced with its - respective DatasetWrapper, ensuring that downstream calls are - also performed within _GLOBAL_NETCDF4_LOCK. + Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, + returning a DatasetWrapper. The original returned netCDF4.Dataset is + simply replaced with its respective DatasetWrapper, ensuring that + downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs) @@ -329,12 +351,13 @@ def __init__(self, shape, dtype, path, variable_name, fill_value): @property def ndim(self): + # noqa: D102 return len(self.shape) def __getitem__(self, keys): # Using a DatasetWrapper causes problems with invalid ID's and the - # netCDF4 library, presumably because __getitem__ gets called so many - # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. + # netCDF4 library, presumably because __getitem__ gets called so many + # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. with _GLOBAL_NETCDF4_LOCK: dataset = netCDF4.Dataset(self.path) try: @@ -363,11 +386,14 @@ def __setstate__(self, state): class NetCDFWriteProxy: """ - The "opposite" of a NetCDFDataProxy : An object mimicking the data access of a - netCDF4.Variable, but where the data is to be ***written to***. 
+ An object mimicking the data access of a netCDF4.Variable. + + The "opposite" of a NetCDFDataProxy : An object mimicking the data access + of a netCDF4.Variable, but where the data is to be ***written to***. - It encapsulates the netcdf file and variable which are actually to be written to. - This opens the file each time, to enable writing the data chunk, then closes it. + It encapsulates the netcdf file and variable which are actually to be + written to. This opens the file each time, to enable writing the data + chunk, then closes it. TODO: could be improved with a caching scheme, but this just about works. """ diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 623d1eb6c7..84e04c1589 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """ -Module to support the loading of Iris cubes from NetCDF files, also using the CF -conventions for metadata interpretation. +Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. @@ -150,7 +149,6 @@ def _actions_activation_stats(engine, cf_name): def _set_attributes(attributes, key, value): """Set attributes dictionary, converting unicode strings appropriately.""" - if isinstance(value, str): try: attributes[str(key)] = str(value) @@ -162,6 +160,8 @@ def _set_attributes(attributes, key, value): def _add_unused_attributes(iris_object, cf_var): """ + Populate the attributes of a cf element with the "unused" attributes. + Populate the attributes of a cf element with the "unused" attributes from the associated CF-netCDF variable. That is, all those that aren't CF reserved terms. 
@@ -398,10 +398,7 @@ def fix_attributes_all_elements(role_name): def _load_aux_factory(engine, cube): - """ - Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory. - - """ + """Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.""" formula_type = engine.requires.get("formula_type") if formula_type in [ "atmosphere_sigma_coordinate", @@ -527,9 +524,10 @@ def _translate_constraints_to_var_callback(constraints): """ Translate load constraints into a simple data-var filter function, if possible. - Returns: - * function(cf_var:CFDataVariable): --> bool, - or None. + Returns + ------- + function : (cf_var:CFDataVariable) + bool, or None. For now, ONLY handles a single NameConstraint with no 'STASH' component. @@ -569,25 +567,24 @@ def inner(cf_datavar): def load_cubes(file_sources, callback=None, constraints=None): """ - Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. - - Args: + Load cubes from a list of NetCDF filenames/OPeNDAP URLs. - * file_sources (string/list): + Parameters + ---------- + file_sources : str or list One or more NetCDF filenames/OPeNDAP URLs to load from. OR open datasets. - Kwargs: - - * callback (callable function): + callback : function, optional Function which can be passed on to :func:`iris.io.run_callback`. - Returns: - Generator of loaded NetCDF :class:`iris.cube.Cube`. + Returns + ------- + Generator of loaded NetCDF :class:`iris.cube.Cube`. """ # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded - # into standard behaviour. + # into standard behaviour. # Deferred import to avoid circular imports. 
from iris.experimental.ugrid.cf import CFUGridReader from iris.experimental.ugrid.load import ( diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index fcbc9a5383..b0bff313e9 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -3,6 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """ +Module to support the saving of Iris cubes to a NetCDF file. + Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. @@ -178,19 +180,19 @@ def __init__(self): self._map = [] def append(self, name, coord): - """ - Append the given name and coordinate pair to the mapping. + """Append the given name and coordinate pair to the mapping. - Args: - - * name: + Parameters + ---------- + name: CF name of the associated coordinate. - * coord: + coord: The coordinate of the associated CF name. - Returns: - None. + Returns + ------- + None. """ self._map.append(CFNameCoordMap._Map(name, coord)) @@ -198,26 +200,24 @@ def append(self, name, coord): @property def names(self): """Return all the CF names.""" - return [pair.name for pair in self._map] @property def coords(self): """Return all the coordinates.""" - return [pair.coord for pair in self._map] def name(self, coord): - """ - Return the CF name, given a coordinate, or None if not recognised. + """Return the CF name, given a coordinate, or None if not recognised. - Args: - - * coord: + Parameters + ---------- + coord: The coordinate of the associated CF name. - Returns: - Coordinate or None. + Returns + ------- + Coordinate or None. """ result = None @@ -228,17 +228,16 @@ def name(self, coord): return result def coord(self, name): - """ - Return the coordinate, given a CF name, or None if not recognised. - - Args: + """Return the coordinate, given a CF name, or None if not recognised. 
- * name: + Parameters + ---------- + name: CF name of the associated coordinate, or None if not recognised. - Returns: - CF name or None. - + Returns + ------- + CF name or None. """ result = None for pair in self._map: @@ -250,6 +249,8 @@ def coord(self, name): def _bytes_if_ascii(string): """ + Convert string to a byte string (str in py2k, bytes in py3k). + Convert the given string to a byte string (str in py2k, bytes in py3k) if the given string can be encoded to ascii, else maintain the type of the inputted string. @@ -268,6 +269,8 @@ def _bytes_if_ascii(string): def _setncattr(variable, name, attribute): """ + Put the given attribute on the given netCDF4 Data type. + Put the given attribute on the given netCDF4 Data type, casting attributes as we go to bytes rather than unicode. @@ -321,9 +324,7 @@ def _data_fillvalue_check(arraylib, data, check_value): class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): - """ - Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`. - """ + """Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`.""" # TODO: remove at the next major release. pass @@ -331,6 +332,8 @@ class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): """ + Work out whether there was a possible or actual fill-value collision. + From the given information, work out whether there was a possible or actual fill-value collision, and if so construct a warning. @@ -342,12 +345,12 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): whether the data array was masked contains_fill_value : bool whether the data array contained the fill-value - warn : bool + warn : bool, optional if True, also issue any resulting warning immediately. 
Returns ------- - None or :class:`Warning` + None or :class:`Warning` If not None, indicates a known or possible problem with filling """ @@ -388,15 +391,15 @@ class Saver: def __init__(self, filename, netcdf_format, compute=True): """ - A manager for saving netcdf files. + Manage saving netcdf files. Parameters ---------- - filename : string or netCDF4.Dataset + filename : str or netCDF4.Dataset Name of the netCDF file to save the cube. OR a writeable object supporting the :class:`netCF4.Dataset` api. - netcdf_format : string + netcdf_format : str Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. @@ -547,16 +550,13 @@ def write( fill_value=None, ): """ - Wrapper for saving cubes to a NetCDF file. - - Args: + Wrap for saving cubes to a NetCDF file. - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - - Kwargs: - - * local_keys (iterable of strings): + local_keys : iterable of str An interable of cube attribute keys. Any cube attributes with matching keys will become attributes on the data variable rather than global attributes. @@ -565,46 +565,38 @@ def write( Has no effect if :attr:`iris.FUTURE.save_split_attrs` is ``True``. - * unlimited_dimensions (iterable of strings and/or - :class:`iris.coords.Coord` objects): + unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no unlimited dimensions are saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED' dimensions. - - * zlib (bool): + zlib : bool If `True`, the data will be compressed in the netCDF file using gzip compression (default `False`). 
- - * complevel (int): + complevel : int An integer between 1 and 9 describing the level of compression desired (default 4). Ignored if `zlib=False`. - - * shuffle (bool): + shuffle : bool If `True`, the HDF5 shuffle filter will be applied before compressing the data (default `True`). This significantly improves compression. Ignored if `zlib=False`. - - * fletcher32 (bool): + fletcher32 : bool If `True`, the Fletcher32 HDF5 checksum algorithm is activated to detect errors. Default `False`. - - * contiguous (bool): + contiguous : bool If `True`, the variable data is stored contiguously on disk. Default `False`. Setting to `True` for a variable with an unlimited dimension will trigger an error. - - * chunksizes (tuple of int): + chunksizes : tuple of int Used to manually specify the HDF5 chunksizes for each dimension of the variable. A detailed discussion of HDF chunking and I/O - performance is available here: - https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html. + performance is available + `here `__. Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. - - * endian (string): + endian : str Used to control whether the data is stored in little or big endian format on disk. Possible values are 'little', 'big' or 'native' (default). The library will automatically handle endian conversions @@ -612,8 +604,7 @@ def write( on a computer with the opposite format as the one used to create the file, there may be some performance advantage to be gained by setting the endian-ness. - - * least_significant_digit (int): + least_significant_digit : int If `least_significant_digit` is specified, variable data will be truncated (quantized). In conjunction with `zlib=True` this produces 'lossy', but significantly more efficient compression. 
For @@ -621,17 +612,16 @@ def write( using `numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits` is determined so that a precision of 0.1 is retained (in this case `bits=4`). From - http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml: + `here `__: "least_significant_digit -- power of ten of the smallest decimal place in unpacked data that is a reliable value". Default is `None`, or no quantization, or 'lossless' compression. - - * packing (type or string or dict or list): A numpy integer datatype - (signed or unsigned) or a string that describes a numpy integer - dtype(i.e. 'i2', 'short', 'u4') or a dict of packing parameters as - described below. This provides support for netCDF data packing as - described in - https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values + packing : type or str or dict or list + A numpy integer datatype (signed or unsigned) or a string that + describes a numpy integer dtype(i.e. 'i2', 'short', 'u4') or a + dict of packing parameters as described below. This provides + support for netCDF data packing as described + `here `__. If this argument is a type (or type string), appropriate values of scale_factor and add_offset will be automatically calculated based on `cube.data` and possible masking. For more control, pass a dict @@ -641,20 +631,20 @@ def write( manually using a dict to avoid this. The default is `None`, in which case the datatype is determined from the cube and no packing will occur. - - * fill_value: + fill_value: The value to use for the `_FillValue` attribute on the netCDF variable. If `packing` is specified the value of `fill_value` should be in the domain of the packed data. - Returns: - None. - - .. note:: + Returns + ------- + None. - The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, - `chunksizes` and `endian` keywords are silently ignored for netCDF - 3 files that do not use HDF5. 
+ Notes + ----- + The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, + `chunksizes` and `endian` keywords are silently ignored for netCDF + 3 files that do not use HDF5. """ # TODO: when iris.FUTURE.save_split_attrs defaults to True, we can deprecate the @@ -767,6 +757,8 @@ def write( @staticmethod def check_attribute_compliance(container, data_dtype): + """Check attributte complliance.""" + def _coerce_value(val_attr, val_attr_value, data_dtype): val_attr_tmp = np.array(val_attr_value, dtype=data_dtype) if (val_attr_tmp != val_attr_value).any(): @@ -798,15 +790,15 @@ def _coerce_value(val_attr, val_attr_value, data_dtype): container.attributes[val_attr] = new_val def update_global_attributes(self, attributes=None, **kwargs): - """ + """Update the CF global attributes. + Update the CF global attributes based on the provided iterable/dictionary and/or keyword arguments. - Args: - - * attributes (dict or iterable of key, value pairs): + Parameters + ---------- + attributes : dict or iterable of key, value pairs CF global attributes to be updated. - """ # TODO: when when iris.FUTURE.save_split_attrs is removed, this routine will # only be called once: it can reasonably be renamed "_set_global_attributes", @@ -825,23 +817,18 @@ def update_global_attributes(self, attributes=None, **kwargs): def _create_cf_dimensions( self, cube, dimension_names, unlimited_dimensions=None ): - """ - Create the CF-netCDF data dimensions. - - Args: + """Create the CF-netCDF data dimensions. - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` in which to lookup coordinates. - - Kwargs: - - * unlimited_dimensions (iterable of strings and/or - :class:`iris.coords.Coord` objects): + unlimited_dimensions : iterable of strings and/or :class:`iris.coords.Coord` objects): List of coordinates to make unlimited (None by default). - Returns: - None. - + Returns + ------- + None. 
""" unlimited_dim_names = [] if unlimited_dimensions is not None: @@ -868,6 +855,8 @@ def _create_cf_dimensions( def _add_mesh(self, cube_or_mesh): """ + Add the cube's mesh, and all related variables to the dataset. + Add the cube's mesh, and all related variables to the dataset. Includes all the mesh-element coordinate and connectivity variables. @@ -876,17 +865,16 @@ def _add_mesh(self, cube_or_mesh): Here, we do *not* add the relevant referencing attributes to the data-variable, because we want to create the data-variable later. - Args: - - * cube_or_mesh (:class:`iris.cube.Cube` - or :class:`iris.experimental.ugrid.Mesh`): + Parameters + ---------- + cube_or_mesh : :class:`iris.cube.Cube`or :class:`iris.experimental.ugrid.Mesh` The Cube or Mesh being saved to the netCDF file. - Returns: - * cf_mesh_name (string or None): + Returns + ------- + cf_mesh_name : str or None The name of the mesh variable created, or None if the cube does not have a mesh. - """ cf_mesh_name = None @@ -1004,6 +992,8 @@ def _add_inner_related_vars( self, cube, cf_var_cube, dimension_names, coordlike_elements ): """ + Create a set of variables for aux-coords, ancillaries or cell-measures. + Create a set of variables for aux-coords, ancillaries or cell-measures, and attach them to the parent data variable. @@ -1048,17 +1038,16 @@ def _add_inner_related_vars( def _add_aux_coords(self, cube, cf_var_cube, dimension_names): """ - Add aux. coordinate to the dataset and associate with the data variable - - Args: + Add aux. coordinate to the dataset and associate with the data variable. - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - * cf_var_cube (:class:`netcdf.netcdf_variable`): + cf_var_cube : :class:`netcdf.netcdf_variable` cf variable cube representation. - * dimension_names (list): + dimension_names : list Names associated with the dimensions of the cube. 
- """ from iris.experimental.ugrid.mesh import ( Mesh, @@ -1090,17 +1079,16 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names): def _add_cell_measures(self, cube, cf_var_cube, dimension_names): """ - Add cell measures to the dataset and associate with the data variable + Add cell measures to the dataset and associate with the data variable. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - * cf_var_cube (:class:`netcdf.netcdf_variable`): + cf_var_cube : :class:`netcdf.netcdf_variable` cf variable cube representation. - * dimension_names (list): + dimension_names : list Names associated with the dimensions of the cube. - """ return self._add_inner_related_vars( cube, @@ -1111,18 +1099,16 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names): def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names): """ - Add ancillary variables measures to the dataset and associate with the - data variable + Add ancillary variables measures to the dataset and associate with the data variable. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - * cf_var_cube (:class:`netcdf.netcdf_variable`): + cf_var_cube : :class:`netcdf.netcdf_variable` cf variable cube representation. - * dimension_names (list): + dimension_names : list Names associated with the dimensions of the cube. - """ return self._add_inner_related_vars( cube, @@ -1135,13 +1121,12 @@ def _add_dim_coords(self, cube, dimension_names): """ Add coordinate variables to NetCDF dataset. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - * dimension_names (list): + dimension_names : list Names associated with the dimensions of the cube. 
- """ # Ensure we create the netCDF coordinate variables first. for coord in cube.dim_coords: @@ -1155,19 +1140,20 @@ def _add_dim_coords(self, cube, dimension_names): def _add_aux_factories(self, cube, cf_var_cube, dimension_names): """ - Modifies the variables of the NetCDF dataset to represent + Represent the presence of dimensionless vertical coordinates. + + Modify the variables of the NetCDF dataset to represent the presence of dimensionless vertical coordinates based on the aux factories of the cube (if any). - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - * cf_var_cube (:class:`netcdf.netcdf_variable`) + cf_var_cube: :class:`netcdf.netcdf_variable` CF variable cube representation. - * dimension_names (list): + dimension_names : list Names associated with the dimensions of the cube. - """ primaries = [] for factory in cube.aux_factories: @@ -1253,23 +1239,23 @@ def _get_dim_names(self, cube_or_mesh): """ Determine suitable CF-netCDF data dimension names. - Args: - - * cube_or_mesh (:class:`iris.cube.Cube` - or :class:`iris.experimental.ugrid.Mesh`): + Parameters + ---------- + cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` The Cube or Mesh being saved to the netCDF file. - Returns: - mesh_dimensions, cube_dimensions - * mesh_dimensions (list of string): - A list of the mesh dimensions of the attached mesh, if any. - * cube_dimensions (list of string): - A lists of dimension names for each dimension of the cube + Returns + ------- + mesh_dimensions : list of str + A list of the mesh dimensions of the attached mesh, if any. + cube_dimensions : list of str + A lists of dimension names for each dimension of the cube - ..note:: - The returned lists are in the preferred file creation order. - One of the mesh dimensions will typically also appear in the cube - dimensions. 
+ Notes + ----- + The returned lists are in the preferred file creation order. + One of the mesh dimensions will typically also appear in the cube + dimensions. """ @@ -1277,6 +1263,8 @@ def record_dimension( names_list, dim_name, length, matching_coords=None ): """ + Record a file dimension, its length and associated "coordinates". + Record a file dimension, its length and associated "coordinates" (which may in fact also be connectivities). @@ -1475,16 +1463,17 @@ def record_dimension( @staticmethod def cf_valid_var_name(var_name): - """ - Return a valid CF var_name given a potentially invalid name. - - Args: + """Return a valid CF var_name given a potentially invalid name. - * var_name (str): + Parameters + ---------- + var_name : str The var_name to normalise - Returns: - A var_name suitable for passing through for variable creation. + Returns + ------- + str + var_name suitable for passing through for variable creation. """ # Replace invalid characters with an underscore ("_"). @@ -1499,17 +1488,17 @@ def _cf_coord_standardised_units(coord): """ Determine a suitable units from a given coordinate. - Args: - - * coord (:class:`iris.coords.Coord`): + Parameters + ---------- + coord : :class:`iris.coords.Coord` A coordinate of a cube. - Returns: + Returns + ------- + units The (standard_name, long_name, unit) of the given :class:`iris.coords.Coord` instance. - """ - units = str(coord.units) # Set the 'units' of 'latitude' and 'longitude' coordinates specified # in 'degrees' to 'degrees_north' and 'degrees_east' respectively, @@ -1561,17 +1550,18 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): """ Create the associated CF-netCDF bounds variable. - Args: - - * coord (:class:`iris.coords.Coord`): + Parameters + ---------- + coord : :class:`iris.coords.Coord` A coordinate of a cube. - * cf_var: + cf_var: CF-netCDF variable - * cf_name (string): + cf_name : str name of the CF-NetCDF variable. 
- Returns: - None + Returns + ------- + None """ if hasattr(coord, "has_bounds") and coord.has_bounds(): @@ -1619,15 +1609,17 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): def _get_cube_variable_name(self, cube): """ - Returns a CF-netCDF variable name for the given cube. - - Args: + Return a CF-netCDF variable name for the given cube. - * cube (class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` An instance of a cube for which a CF-netCDF variable name is required. - Returns: + Returns + ------- + str A CF-netCDF variable name as a string. """ @@ -1642,18 +1634,19 @@ def _get_cube_variable_name(self, cube): def _get_coord_variable_name(self, cube_or_mesh, coord): """ - Returns a CF-netCDF variable name for a given coordinate-like element. + Return a CF-netCDF variable name for a given coordinate-like element. - Args: - - * cube_or_mesh (:class:`iris.cube.Cube` - or :class:`iris.experimental.ugrid.Mesh`): + Parameters + ---------- + cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` The Cube or Mesh being saved to the netCDF file. - * coord (:class:`iris.coords._DimensionalMetadata`): + coord : :class:`iris.coords._DimensionalMetadata` An instance of a coordinate (or similar), for which a CF-netCDF variable name is required. - Returns: + Returns + ------- + str A CF-netCDF variable name as a string. """ @@ -1704,15 +1697,17 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): def _get_mesh_variable_name(self, mesh): """ - Returns a CF-netCDF variable name for the mesh. - - Args: + Return a CF-netCDF variable name for the mesh. - * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`): + Parameters + ---------- + mesh : :class:`iris.experimental.ugrid.mesh.Mesh` An instance of a Mesh for which a CF-netCDF variable name is required. - Returns: + Returns + ------- + str A CF-netCDF variable name as a string. 
""" @@ -1731,12 +1726,14 @@ def _create_mesh(self, mesh): """ Create a mesh variable in the netCDF dataset. - Args: - - * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`): + Parameters + ---------- + mesh : :class:`iris.experimental.ugrid.mesh.Mesh` The Mesh to be saved to CF-netCDF file. - Returns: + Returns + ------- + str The string name of the associated CF-netCDF variable saved. """ @@ -1811,6 +1808,8 @@ def _create_generic_cf_array_var( fill_value=None, ): """ + Create theCF-netCDF variable given dimensional_metadata. + Create the associated CF-netCDF variable in the netCDF dataset for the given dimensional_metadata. @@ -1818,33 +1817,32 @@ def _create_generic_cf_array_var( If the metadata element is a coord, it may also contain bounds. In which case, an additional var is created and linked to it. - Args: - - * cube_or_mesh (:class:`iris.cube.Cube` - or :class:`iris.experimental.ugrid.Mesh`): + Parameters + ---------- + cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` The Cube or Mesh being saved to the netCDF file. - * cube_dim_names (list of string): + cube_dim_names : list of str The name of each dimension of the cube. - * element: + element : :class:`iris.coords._DimensionalMetadata` An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the cube. Provides data, units and standard/long/var names. Not used if 'element_dims' is not None. - * element_dims (list of string, or None): + element_dims : list of str, or None If set, contains the variable dimension (names), otherwise these are taken from `element.cube_dims[cube]`. For Mesh components (element coordinates and connectivities), this *must* be passed in, as "element.cube_dims" does not function. - * fill_value (number or None): + fill_value : number or None If set, create the variable with this fill-value, and fill any masked data points with this value. 
If not set, standard netcdf4-python behaviour : the variable has no '_FillValue' property, and uses the "standard" fill-value for its type. - Returns: - var_name (string): - The name of the CF-netCDF variable created. - + Returns + ------- + str + The name of the CF-netCDF variable created. """ # Support cube or mesh save. from iris.cube import Cube @@ -1960,16 +1958,17 @@ def _create_cf_cell_methods(self, cube, dimension_names): """ Create CF-netCDF string representation of a cube cell methods. - Args: - - * cube (:class:`iris.cube.Cube`) or cubelist - (:class:`iris.cube.CubeList`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of cubes to be saved to a netCDF file. - * dimension_names (list): + dimension_names : list Names associated with the dimensions of the cube. - Returns: + Returns + ------- + str CF-netCDF string representation of a cube cell methods. """ @@ -2009,20 +2008,22 @@ def _create_cf_cell_methods(self, cube, dimension_names): def _create_cf_grid_mapping(self, cube, cf_var_cube): """ + Create CF-netCDF grid mapping and associated CF-netCDF variable. + Create CF-netCDF grid mapping variable and associated CF-netCDF data variable grid mapping attribute. - Args: - - * cube (:class:`iris.cube.Cube`) or cubelist - (:class:`iris.cube.CubeList`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of cubes to be saved to a netCDF file. - * cf_var_cube (:class:`netcdf.netcdf_variable`): + cf_var_cube : :class:`netcdf.netcdf_variable` cf variable cube representation. - Returns: - None + Returns + ------- + None """ cs = cube.coord_system("CoordSystem") @@ -2278,32 +2279,30 @@ def _create_cf_data_variable( **kwargs, ): """ - Create CF-netCDF data variable for the cube and any associated grid - mapping. 
+ Create CF-netCDF data variable for the cube and any associated grid mapping. + # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can # be removed. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` The associated cube being saved to CF-netCDF file. - * dimension_names (list): + dimension_names : list String names for each dimension of the cube. - - Kwargs: - - * local_keys (iterable of strings): - * see :func:`iris.fileformats.netcdf.Saver.write` - * packing (type or string or dict or list): - * see :func:`iris.fileformats.netcdf.Saver.write` - * fill_value: - * see :func:`iris.fileformats.netcdf.Saver.write` + local_keys : iterable of str, optional + See :func:`iris.fileformats.netcdf.Saver.write` + packing : type or str or dict or list, optional + See :func:`iris.fileformats.netcdf.Saver.write` + fill_value : optional + See :func:`iris.fileformats.netcdf.Saver.write` All other keywords are passed through to the dataset's `createVariable` method. - Returns: - The newly created CF-netCDF data variable. + Returns + ------- + The newly created CF-netCDF data variable. """ # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can @@ -2454,13 +2453,14 @@ def _increment_name(self, varname): Avoidance of conflicts between variable names, where the name is incremented to distinguish it from others. - Args: - - * varname (string): + Parameters + ---------- + varname : str Variable name to increment. - Returns: - Incremented varname. + Returns + ------- + Incremented varname. """ num = 0 @@ -2567,18 +2567,19 @@ def store(data, cf_var, fill_info): def delayed_completion(self) -> Delayed: """ - Create and return a :class:`dask.delayed.Delayed` to perform file completion - for delayed saves. + Perform file completion for delayed saves. + + Create and return a :class:`dask.delayed.Delayed` to perform file + completion for delayed saves. 
- This contains all the delayed writes, which complete the file by filling out - the data of variables initially created empty, and also the checks for - potential fill-value collisions. - When computed, it returns a list of any warnings which were generated in the - save operation. + This contains all the delayed writes, which complete the file by + filling out the data of variables initially created empty, and also the + checks for potential fill-value collisions. When computed, it returns + a list of any warnings which were generated in the save operation. Returns ------- - completion : :class:`dask.delayed.Delayed` + :class:`dask.delayed.Delayed` Notes ----- @@ -2691,7 +2692,7 @@ def save( fill_value=None, compute=True, ): - """ + r""" Save cube(s) to a netCDF file, given the cube and the filename. * Iris will write CF 1.7 compliant NetCDF files. @@ -2712,13 +2713,12 @@ def save( status of the cube's data payload, unless the netcdf_format is explicitly specified to be 'NETCDF3' or 'NETCDF3_CLASSIC'. - Args: - - * cube (:class:`iris.cube.Cube` or :class:`iris.cube.CubeList`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or other iterable of cubes to be saved to a netCDF file. - - * filename (string): + filename : str Name of the netCDF file to save the cube(s). **Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object. @@ -2726,58 +2726,50 @@ def save( When saving to a dataset, ``compute`` **must** be ``False`` : See the ``compute`` parameter. - Kwargs: - - * netcdf_format (string): + netcdf_format : str Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. - - * local_keys (iterable of strings): + local_keys : iterable of str, optional An interable of cube attribute keys. 
Any cube attributes with matching keys will become attributes on the data variable rather than global attributes. - **NOTE:** this is *ignored* if 'split-attribute saving' is **enabled**, - i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``. - * unlimited_dimensions (iterable of strings and/or - :class:`iris.coords.Coord` objects): + .. note:: + This is *ignored* if 'split-attribute saving' is **enabled**, + i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``. + + unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` objects, optional List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no unlimited dimensions are saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED' dimensions. - - * zlib (bool): + zlib : bool, optional If `True`, the data will be compressed in the netCDF file using gzip compression (default `False`). - - * complevel (int): + complevel : int An integer between 1 and 9 describing the level of compression desired (default 4). Ignored if `zlib=False`. - - * shuffle (bool): + shuffle : bool, optional If `True`, the HDF5 shuffle filter will be applied before compressing the data (default `True`). This significantly improves compression. Ignored if `zlib=False`. - - * fletcher32 (bool): + fletcher32 : bool, optional If `True`, the Fletcher32 HDF5 checksum algorithm is activated to detect errors. Default `False`. - - * contiguous (bool): + contiguous : bool, optional If `True`, the variable data is stored contiguously on disk. Default `False`. Setting to `True` for a variable with an unlimited dimension will trigger an error. - - * chunksizes (tuple of int): + chunksizes : tuple of int, optional Used to manually specify the HDF5 chunksizes for each dimension of the variable. 
A detailed discussion of HDF chunking and I/O performance is - available here: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html. + available + `here `__. Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. - - * endian (string): + endian : str Used to control whether the data is stored in little or big endian format on disk. Possible values are 'little', 'big' or 'native' (default). The library will automatically handle endian conversions @@ -2785,8 +2777,7 @@ def save( computer with the opposite format as the one used to create the file, there may be some performance advantage to be gained by setting the endian-ness. - - * least_significant_digit (int): + least_significant_digit : int, optional If `least_significant_digit` is specified, variable data will be truncated (quantized). In conjunction with `zlib=True` this produces 'lossy', but significantly more efficient compression. For example, if @@ -2794,17 +2785,17 @@ def save( `numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits` is determined so that a precision of 0.1 is retained (in this case `bits=4`). From - http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml: + "least_significant_digit -- power of ten of the smallest decimal place in unpacked data that is a reliable value". Default is `None`, or no quantization, or 'lossless' compression. - - * packing (type or string or dict or list): A numpy integer datatype - (signed or unsigned) or a string that describes a numpy integer dtype - (i.e. 'i2', 'short', 'u4') or a dict of packing parameters as described - below or an iterable of such types, strings, or dicts. 
- This provides support for netCDF data packing as described in - https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values + packing : type or str or dict or list, optional + A numpy integer datatype (signed or unsigned) or a string that + describes a numpy integer dtype (i.e. 'i2', 'short', 'u4') or a dict + of packing parameters as described below or an iterable of such types, + strings, or dicts. This provides support for netCDF data packing as + described in + `here `__ If this argument is a type (or type string), appropriate values of scale_factor and add_offset will be automatically calculated based on `cube.data` and possible masking. For more control, pass a dict with @@ -2814,18 +2805,16 @@ def save( avoid this. The default is `None`, in which case the datatype is determined from the cube and no packing will occur. If this argument is a list it must have the same number of elements as `cube` if `cube` is - a `:class:`iris.cube.CubeList`, or one element, and each element of + a :class:`iris.cube.CubeList`, or one element, and each element of this argument will be applied to each cube separately. - - * fill_value (numeric or list): + fill_value : numeric or list, optional The value to use for the `_FillValue` attribute on the netCDF variable. If `packing` is specified the value of `fill_value` should be in the domain of the packed data. If this argument is a list it must have the same number of elements as `cube` if `cube` is a - `:class:`iris.cube.CubeList`, or a single element, and each element of + :class:`iris.cube.CubeList`, or a single element, and each element of this argument will be applied to each cube separately. - - * compute (bool): + compute : bool, optional Default is ``True``, meaning complete the file immediately, and return ``None``. When ``False``, create the output file but don't write any lazy array content to @@ -2837,7 +2826,7 @@ def save( .. 
Note:: when computed, the returned :class:`dask.delayed.Delayed` object returns - a list of :class:`Warning`\\s : These are any warnings which *would* have + a list of :class:`Warning` : These are any warnings which *would* have been issued in the save call, if ``compute`` had been ``True``. .. Note:: @@ -2848,21 +2837,18 @@ def save( must (re-)open the dataset for writing, which will fail if the file is still open for writing by the caller. - Returns: - result (None, or dask.delayed.Delayed): - If `compute=True`, returns `None`. - Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed - writing to fill in the variables data. - - .. note:: - - The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, - `chunksizes` and `endian` keywords are silently ignored for netCDF 3 - files that do not use HDF5. - - .. seealso:: - - NetCDF Context manager (:class:`~Saver`). + Returns + ------- + result: None or dask.delayed.Delayed + If `compute=True`, returns `None`. + Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed + writing to fill in the variables data. + + Notes + ----- + The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, + `chunksizes` and `endian` keywords are silently ignored for netCDF 3 + files that do not use HDF5. """ from iris.cube import Cube, CubeList diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index c8e02a40cf..87725789e5 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -2,10 +2,8 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides an interface to manage URI scheme support in iris. 
-""" +"""Provides an interface to manage URI scheme support in iris.""" import collections from collections import OrderedDict @@ -39,29 +37,27 @@ def __setitem__(self, key, value): def run_callback(callback, cube, field, filename): """ - Runs the callback mechanism given the appropriate arguments. + Run the callback mechanism given the appropriate arguments. - Args: - - * callback: + Parameters + ---------- + callback : A function to add metadata from the originating field and/or URI which obeys the following rules: - 1. Function signature must be: ``(cube, field, filename)``. - 2. Modifies the given cube inplace, unless a new cube is - returned by the function. - 3. If the cube is to be rejected the callback must raise - an :class:`iris.exceptions.IgnoreCubeException`. - - .. note:: + 1. Function signature must be: ``(cube, field, filename)``. + 2. Modifies the given cube inplace, unless a new cube is + returned by the function. + 3. If the cube is to be rejected the callback must raise + an :class:`iris.exceptions.IgnoreCubeException`. - It is possible that this function returns None for certain callbacks, - the caller of this function should handle this case. - - .. note:: + Notes + ----- + It is possible that this function returns None for certain callbacks, + the caller of this function should handle this case. - This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ from iris.cube import Cube @@ -88,7 +84,7 @@ def run_callback(callback, cube, field, filename): def decode_uri(uri, default="file"): r""" - Decodes a single URI into scheme and scheme-specific parts. + Decode a single URI into scheme and scheme-specific parts. In addition to well-formed URIs, it also supports bare file paths as strings or :class:`pathlib.PurePath`. 
Both Windows and UNIX style paths are @@ -100,25 +96,26 @@ def decode_uri(uri, default="file"): from iris.io import * - Examples: - >>> from iris.io import decode_uri - >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b')) - ('http', '//www.thing.com:8080/resource?id=a:b') + Examples + -------- + >>> from iris.io import decode_uri + >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b')) + ('http', '//www.thing.com:8080/resource?id=a:b') - >>> print(decode_uri('file:///data/local/dataZoo/...')) - ('file', '///data/local/dataZoo/...') + >>> print(decode_uri('file:///data/local/dataZoo/...')) + ('file', '///data/local/dataZoo/...') - >>> print(decode_uri('/data/local/dataZoo/...')) - ('file', '/data/local/dataZoo/...') + >>> print(decode_uri('/data/local/dataZoo/...')) + ('file', '/data/local/dataZoo/...') - >>> print(decode_uri('file:///C:\data\local\dataZoo\...')) - ('file', '///C:\\data\\local\\dataZoo\\...') + >>> print(decode_uri('file:///C:\data\local\dataZoo\...')) + ('file', '///C:\\data\\local\\dataZoo\\...') - >>> print(decode_uri('C:\data\local\dataZoo\...')) - ('file', 'C:\\data\\local\\dataZoo\\...') + >>> print(decode_uri('C:\data\local\dataZoo\...')) + ('file', 'C:\\data\\local\\dataZoo\\...') - >>> print(decode_uri('dataZoo/...')) - ('file', 'dataZoo/...') + >>> print(decode_uri('dataZoo/...')) + ('file', 'dataZoo/...') >>> print(decode_uri({})) ('data', {}) @@ -156,7 +153,7 @@ def expand_filespecs(file_specs, files_expected=True): ---------- file_specs : iterable of str File paths which may contain ``~`` elements or wildcards. - files_expected : bool, default=True + files_expected : bool, optional, default=True Whether file is expected to exist (i.e. for load). Returns @@ -205,14 +202,16 @@ def expand_filespecs(file_specs, files_expected=True): def load_files(filenames, callback, constraints=None): """ - Takes a list of filenames which may also be globs, and optionally a + Create a generator of Cubes from given files. 
+ + Take a list of filenames which may also be globs, and optionally a constraint set and a callback function, and returns a generator of Cubes from the given files. - .. note:: - - Typically, this function should not be called directly; instead, the - intended interface for loading is :func:`iris.load`. + Notes + ----- + Typically, this function should not be called directly; instead, the + intended interface for loading is :func:`iris.load`. """ from iris.fileformats import FORMAT_AGENT @@ -243,13 +242,15 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): """ - Takes a list of OPeNDAP URLs and a callback function, and returns a generator - of Cubes from the given URLs. + Create generator of Cubes from the given OPeNDAP URLs. - .. note:: + Take a list of OPeNDAP URLs and a callback function, and returns a generator + of Cubes from the given URLs. - Typically, this function should not be called directly; instead, the - intended interface for loading is :func:`iris.load`. + Notes + ----- + Typically, this function should not be called directly; instead, the + intended interface for loading is :func:`iris.load`. """ # @@ -276,8 +277,8 @@ def load_http(urls, callback): def load_data_objects(urls, callback): """ - Takes a list of data-source objects and a callback function, and returns a - generator of Cubes. + Take a list of data-source objects and a callback function, returns a generator of Cubes. + The 'objects' take the place of 'uris' in the load calls. The appropriate types of the data-source objects are expected to be recognised by the handlers : This is done in the usual way by passing the @@ -345,12 +346,16 @@ def add_saver(file_extension, new_saver): """ Add a custom saver to the Iris session. - Args: - - * file_extension: A string such as "pp" or "my_format". - * new_saver: A function of the form ``my_saver(cube, target)``. + Parameters + ---------- + file_extension : str + A string such as "pp" or "my_format". 
+ new_saver : function + A function of the form ``my_saver(cube, target)``. - See also :func:`iris.io.save` + See Also + -------- + :func:`iris.io.save` """ # Make sure it's a func with 2+ args @@ -368,14 +373,16 @@ def find_saver(filespec): """ Find the saver function appropriate to the given filename or extension. - Args: - - * filespec - A string such as "my_file.pp" or "PP". + Parameters + ---------- + filespec : str + A string such as "my_file.pp" or "PP". - Returns: - A save function or None. - Save functions can be passed to :func:`iris.io.save`. + Returns + ------- + Save function + Save functions can be passed to :func:`iris.io.save`. Value may also + be None. """ _check_init_savers() @@ -400,12 +407,12 @@ def save(source, target, saver=None, **kwargs): Iris currently supports three file formats for saving, which it can recognise by filename extension: - * netCDF - the Unidata network Common Data Format: - * see :func:`iris.fileformats.netcdf.save` - * GRIB2 - the WMO GRIdded Binary data format: - * see :func:`iris_grib.save_grib2`. - * PP - the Met Office UM Post Processing Format: - * see :func:`iris.fileformats.pp.save` + * netCDF - the Unidata network Common Data Format: + * see :func:`iris.fileformats.netcdf.save` + * GRIB2 - the WMO GRIdded Binary data format: + * see :func:`iris_grib.save_grib2`. + * PP - the Met Office UM Post Processing Format: + * see :func:`iris.fileformats.pp.save` A custom saver can be provided to the function to write to a different file format. @@ -469,8 +476,7 @@ def save(source, target, saver=None, **kwargs): >>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC") Notes - ------ - + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. 
diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index d2d3b5fd41..da64345cf3 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -3,9 +3,10 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """ -A module to provide convenient file format identification through a combination of filename extension -and file based *magic* numbers. +Provide convenient file format identification. +A module to provide convenient file format identification through a combination +of filename extension and file based *magic* numbers. To manage a collection of FormatSpecifications for loading:: @@ -24,9 +25,11 @@ with open(png_filename, 'rb') as png_fh: handling_spec = fagent.get_spec(png_filename, png_fh) -In the example, handling_spec will now be the png_spec previously added to the agent. +In the example, handling_spec will now be the png_spec previously added to the +agent. -Now that a specification has been found, if a handler has been given with the specification, then the file can be handled:: +Now that a specification has been found, if a handler has been given with the +specification, then the file can be handled:: handler = handling_spec.handler if handler is None: @@ -34,8 +37,8 @@ else: result = handler(filename) -The calling sequence of handler is dependent on the function given in the original specification and can be customised to your project's needs. - +The calling sequence of handler is dependent on the function given in the +original specification and can be customised to your project's needs. """ @@ -47,10 +50,14 @@ class FormatAgent: """ - The FormatAgent class is the containing object which is responsible for identifying the format of a given file - by interrogating its children FormatSpecification instances. + Identifies format of a given file by interrogating its children instances. 
+ + The FormatAgent class is the containing object which is responsible for + identifying the format of a given file by interrogating its children + FormatSpecification instances. - Typically a FormatAgent will be created empty and then extended with the :meth:`FormatAgent.add_spec` method:: + Typically a FormatAgent will be created empty and then extended with the + :meth:`FormatAgent.add_spec` method:: agent = FormatAgent() agent.add_spec(NetCDF_specification) @@ -62,12 +69,11 @@ class FormatAgent: """ def __init__(self, format_specs=None): - """ """ self._format_specs = list(format_specs or []) self._format_specs.sort() def add_spec(self, format_spec): - """Add a FormatSpecification instance to this agent for format consideration.""" + """Add a FormatSpecification instance to this agent for format.""" self._format_specs.append(format_spec) self._format_specs.sort() @@ -82,15 +88,22 @@ def __str__(self): def get_spec(self, basename, buffer_obj): """ + Pick the first FormatSpecification. + Pick the first FormatSpecification which can handle the given filename and file/buffer object. - .. note:: + Parameters + ---------- + basename : TBD + buffer_obj : TBD - ``buffer_obj`` may be ``None`` when a seekable file handle is not - feasible (such as over the http protocol). In these cases only the - format specifications which do not require a file handle are - tested. + Notes + ----- + ``buffer_obj`` may be ``None`` when a seekable file handle is not + feasible (such as over the http protocol). In these cases only the + format specifications which do not require a file handle are + tested. """ element_cache = {} @@ -145,8 +158,10 @@ class FormatSpecification: """ Provides the base class for file type definition. - Every FormatSpecification instance has a name which can be accessed with the :attr:`FormatSpecification.name` property and - a FileElement, such as filename extension or 32-bit magic number, with an associated value for format identification. 
+ Every FormatSpecification instance has a name which can be accessed with + the :attr:`FormatSpecification.name` property and a FileElement, such as + filename extension or 32-bit magic number, with an associated value for + format identification. """ @@ -160,20 +175,26 @@ def __init__( constraint_aware_handler=False, ): """ - Constructs a new FormatSpecification given the format_name and particular FileElements - - Args: - - * format_name - string name of fileformat being described - * file_element - FileElement instance of the element which identifies this FormatSpecification - * file_element_value - The value that the file_element should take if a file matches this FormatSpecification - - Kwargs: - - * handler - function which will be called when the specification has been identified and is required to handler a format. - If None, then the file can still be identified but no handling can be done. - * priority - Integer giving a priority for considering this specification where higher priority means sooner consideration. - + Construct a new FormatSpecification. + + Parameters + ---------- + format_name : str + string name of fileformat being described + file_element : + FileElement instance of the element which identifies this + FormatSpecification + file_element_value : + The value that the file_element should take if a file matches this + FormatSpecification + handler : optional + function which will be called when the specification has been + identified and is required to handler a format. If None, then the + file can still be identified but no handling can be done. + priority: int + Integer giving a priority for considering this specification where + higher priority means sooner consideration. 
+ constraint_aware_handler: optional, default=False """ if not isinstance(file_element, FileElement): raise ValueError( @@ -189,26 +210,29 @@ def __init__( self.constraint_aware_handler = constraint_aware_handler def __hash__(self): - # Hashed by specification for consistent ordering in FormatAgent (including self._handler in this hash - # for example would order randomly according to object id) + # Hashed by specification for consistent ordering in FormatAgent + # (including self._handler in this hash for example would order + # randomly according to object id) return hash(self._file_element) @property def file_element(self): + # noqa D102 return self._file_element @property def file_element_value(self): + # noqa D102 return self._file_element_value @property def name(self): - """The name of this FileFormat. (Read only)""" + """The name of this FileFormat. (Read only).""" return self._format_name @property def handler(self): - """The handler function of this FileFormat. (Read only)""" + """The handler function of this FileFormat. (Read only).""" return self._handler def _sort_key(self): @@ -230,7 +254,8 @@ def __ne__(self, other): return not (self == other) def __repr__(self): - # N.B. loader is not always going to provide a nice repr if it is a lambda function, hence a prettier version is available in __str__ + # N.B. loader is not always going to provide a nice repr if it is a + # lambda function, hence a prettier version is available in __str__ return "FormatSpecification(%r, %r, %r, handler=%r, priority=%s)" % ( self._format_name, self._file_element, @@ -249,23 +274,27 @@ def __str__(self): class FileElement: """ - Represents a specific aspect of a FileFormat which can be identified using the given element getter function. + Represents a specific aspect of a FileFormat. + + Represents a specific aspect of a FileFormat which can be identified using + the given element getter function. 
""" def __init__(self, requires_fh=True): """ - Constructs a new file element, which may require a file buffer. + Construct a new file element, which may require a file buffer. - Kwargs: - - * requires_fh - Whether this FileElement needs a file buffer. + Parameters + ---------- + requires_fh : optional + Whether this FileElement needs a file buffer. """ self.requires_fh = requires_fh def get_element(self, basename, file_handle): - """Called when identifying the element of a file that this FileElement is representing.""" + """Identify the element of a file that this FileElement is representing.""" raise NotImplementedError("get_element must be defined in a subclass") def __hash__(self): @@ -286,6 +315,7 @@ def __init__(self, num_bytes, offset=None): self._offset = offset def get_element(self, basename, file_handle): + # noqa D102 if self._offset is not None: file_handle.seek(self._offset) bytes = file_handle.read(self._num_bytes) @@ -306,6 +336,7 @@ class FileExtension(FileElement): """A :class:`FileElement` that returns the extension from the filename.""" def get_element(self, basename, file_handle): + # noqa D102 return os.path.splitext(basename)[1] @@ -313,11 +344,14 @@ class LeadingLine(FileElement): """A :class:`FileElement` that returns the first line from the file.""" def get_element(self, basename, file_handle): + # noqa: D102 return file_handle.readline() class UriProtocol(FileElement): """ + Return the scheme and part from a URI, using :func:`~iris.io.decode_uri`. + A :class:`FileElement` that returns the "scheme" and "part" from a URI, using :func:`~iris.io.decode_uri`. 
@@ -327,6 +361,7 @@ def __init__(self): FileElement.__init__(self, requires_fh=False) def get_element(self, basename, file_handle): + # noqa: D102 from iris.io import decode_uri return decode_uri(basename)[0] @@ -345,7 +380,10 @@ def __init__(self): super().__init__(requires_fh=False) def get_element(self, basename, file_handle): - # In this context, there should *not* be a file opened by the handler. - # Just return 'basename', which in this case is not a name, or even a - # string, but a passed 'data object'. + """ + In this context, there should *not* be a file opened by the handler. + + Just return 'basename', which in this case is not a name, or even a + string, but a passed 'data object'. + """ return basename diff --git a/lib/iris/time.py b/lib/iris/time.py index ddedeedd91..6ba85a0051 100644 --- a/lib/iris/time.py +++ b/lib/iris/time.py @@ -2,17 +2,16 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Time handling. -""" +"""Time handling.""" import functools @functools.total_ordering class PartialDateTime: - """ + """Allow partial comparisons against datetime-like objects. + A :class:`PartialDateTime` object specifies values for some subset of the calendar/time fields (year, month, hour, etc.) for comparing with :class:`datetime.datetime`-like instances. @@ -44,7 +43,7 @@ class PartialDateTime: #: A dummy value provided as a workaround to allow comparisons with #: :class:`datetime.datetime`. #: See http://bugs.python.org/issue8005. - # NB. It doesn't even matter what this value is. + #: NB. It doesn't even matter what this value is. timetuple = None def __init__( @@ -57,20 +56,28 @@ def __init__( second=None, microsecond=None, ): - """ - Allows partial comparisons against datetime-like objects. 
- - Args: - - * year (int): - * month (int): - * day (int): - * hour (int): - * minute (int): - * second (int): - * microsecond (int): - - For example, to select any days of the year after the 3rd of April: + """Allow partial comparisons against datetime-like objects. + + Parameters + ---------- + year : int + The year number as an integer, or None. + month : int + The month number as an integer, or None. + day : int + The day number as an integer, or None. + hour : int + The hour number as an integer, or None. + minute : int + The minute number as an integer, or None. + second : int + The second number as an integer, or None. + microsecond : int + The microsecond number as an integer, or None. + + Examples + -------- + To select any days of the year after the 3rd of April: >>> from iris.time import PartialDateTime >>> import datetime @@ -85,20 +92,12 @@ def __init__( False """ - - #: The year number as an integer, or None. self.year = year - #: The month number as an integer, or None. self.month = month - #: The day number as an integer, or None. self.day = day - #: The hour number as an integer, or None. self.hour = hour - #: The minute number as an integer, or None. self.minute = minute - #: The second number as an integer, or None. self.second = second - #: The microsecond number as an integer, or None. self.microsecond = microsecond def __repr__(self): From 932fd93234e9f888ab40e28ff56a0d7fefb9f18f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 7 Dec 2023 15:56:02 +0000 Subject: [PATCH 098/134] Possible citation updates -- not clear whether appropriate. (#5453) * Possible citation updates -- not clear whether appropriate. 
* Update docs/src/userguide/citation.rst revert conversion to url -- it should be a bare DOI --- docs/src/userguide/citation.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/userguide/citation.rst b/docs/src/userguide/citation.rst index 62af43c94f..7169ca3072 100644 --- a/docs/src/userguide/citation.rst +++ b/docs/src/userguide/citation.rst @@ -16,11 +16,11 @@ For example:: @manual{Iris, author = {{Met Office}}, title = {Iris: A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data}, - edition = {v3.6}, + edition = {v3.7}, year = {2010 - 2023}, address = {Exeter, Devon}, - url = {http://scitools.org.uk/}, - doi = {10.5281/zenodo.7948293} + url = {https://github.com/SciTools/iris}, + doi = {10.5281/zenodo.8305232} } @@ -34,7 +34,7 @@ Suggested format:: For example:: - Iris. v3.5. 27-Apr-2023. Met Office. UK. https://doi.org/10.5281/zenodo.7871017 22-12-2022 + Iris. v3.7. 31-Aug-2023. Met Office. UK. https://doi.org/10.5281/zenodo.8305232 22-12-2022 ******************** @@ -47,7 +47,7 @@ Suggested format:: For example:: - Iris. Met Office. git@github.com:SciTools/iris.git 22-12-2022 + Iris. Met Office. https://github.com/SciTools/iris.git 31-08-2023 .. _How to cite and describe software: https://software.ac.uk/how-cite-software From bbe0b679a0951295dd1cd807682d88008f2bc3c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 16:16:21 +0000 Subject: [PATCH 099/134] Bump actions/setup-python from 4 to 5 (#5614) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/benchmarks_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index cb5110dda5..45b857cc31 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -75,7 +75,7 @@ jobs: - name: Set up Python # benchmarks/bm_runner.py only needs builtins to run. - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 - name: Post reports env: From ab10aa2870fcfa14ccdc380afcd6dd162c61b6da Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 16:25:50 +0000 Subject: [PATCH 100/134] Updated environment lockfiles (#5608) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 10 +++++----- requirements/locks/py311-linux-64.lock | 10 +++++----- requirements/locks/py39-linux-64.lock | 10 +++++----- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 631227ca51..f02da60d5c 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -18,7 +18,7 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 
+https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -43,7 +43,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda#603827b39ea2b835268adb8c821b8570 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -164,7 +164,7 @@ https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.c https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py310h2372a71_0.conda#c2dcff257e040bcda00e2a30a9d85333 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c 
-https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.9.0-pyha770c72_0.conda#9677d53e8eb8e3282e9d84c5d0c525d7 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 @@ -180,7 +180,7 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.9.0-hd8ed1ab_0.conda#a0c28e5b7f824a19bd8ee9255c9bd58c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda#d3147cfbf72d6ae7bba10562208f6def https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f @@ -190,7 +190,7 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index d85b20ee07..142d50df86 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -18,7 +18,7 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -43,7 +43,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda#603827b39ea2b835268adb8c821b8570 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -163,7 +163,7 @@ https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.c https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py311h459d7ec_0.conda#5b24692ece82f89e5cb9a469d9619731 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.9.0-pyha770c72_0.conda#9677d53e8eb8e3282e9d84c5d0c525d7 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 
https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 @@ -179,7 +179,7 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.9.0-hd8ed1ab_0.conda#a0c28e5b7f824a19bd8ee9255c9bd58c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f @@ -189,7 +189,7 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 
https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index fa5fa80250..c43d2d1c61 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -18,7 +18,7 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -43,7 +43,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 
+https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda#603827b39ea2b835268adb8c821b8570 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 @@ -162,7 +162,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py39hd1e30aa_0.conda#616bc0b442acefebdbe97c7b885d771e https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.9.0-pyha770c72_0.conda#9677d53e8eb8e3282e9d84c5d0c525d7 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 @@ -180,7 +180,7 @@ https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 
-https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.9.0-hd8ed1ab_0.conda#a0c28e5b7f824a19bd8ee9255c9bd58c https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py39h474f0d3_0.conda#459a58eda3e74dd5e3d596c618e7f20a https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f @@ -189,7 +189,7 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 From 80ac9d447564c8efc07799a81a43d1998ca08d1d Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Fri, 8 Dec 2023 10:58:05 +0100 Subject: [PATCH 101/134] Faster and simpler iris.util.array_equal (#5610) --- lib/iris/util.py | 22 +++------------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/lib/iris/util.py b/lib/iris/util.py index 
10a58fdef0..0e4e951350 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -22,7 +22,7 @@ import numpy.ma as ma from iris._deprecation import warn_deprecated -from iris._lazy_data import as_concrete_data, is_lazy_data, is_lazy_masked_data +from iris._lazy_data import is_lazy_data, is_lazy_masked_data from iris.common import SERVICES from iris.common.lenient import _lenient_client import iris.exceptions @@ -400,25 +400,9 @@ def normalise_array(array): eq = array1.shape == array2.shape if eq: eqs = array1 == array2 - if withnans and (array1.dtype.kind == "f" or array2.dtype.kind == "f"): - nans1, nans2 = np.isnan(array1), np.isnan(array2) - eq = as_concrete_data(np.all(nans1 == nans2)) - - if eq: - eqs = as_concrete_data(eqs) - if not is_lazy_data(nans1): - idxs = nans1 - elif not is_lazy_data(nans2): - idxs = nans2 - else: - idxs = as_concrete_data(nans1) - - if np.any(idxs): - eqs[idxs] = True - - if eq: - eq = as_concrete_data(np.all(eqs)) # check equal at all points + eqs = np.where(np.isnan(array1) & np.isnan(array2), True, eqs) + eq = bool(np.all(eqs)) return eq From 8694479607d5967561affa05b85a6b8fb49477df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 17:42:44 +0000 Subject: [PATCH 102/134] Bump actions/stale from 8 to 9 (#5616) Bumps [actions/stale](https://github.com/actions/stale) from 8 to 9. - [Release notes](https://github.com/actions/stale/releases) - [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/stale/compare/v8...v9) --- updated-dependencies: - dependency-name: actions/stale dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 8e18b36491..3df5aa3a18 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -14,7 +14,7 @@ jobs: if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: repo-token: ${{ secrets.GITHUB_TOKEN }} From 1e1a2157efe475dd2ace52b5c43b472c8c826f51 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Mon, 11 Dec 2023 16:18:49 +0100 Subject: [PATCH 103/134] Update links to https (#5621) * Update links to https * Update test and docstring --- CHANGES | 4 ++-- README.md | 2 +- benchmarks/asv.conf.json | 2 +- .../general/plot_anomaly_log_colouring.py | 4 ++-- docs/src/IEP/IEP001.adoc | 18 +++++++++--------- docs/src/conf.py | 18 +++++++++--------- docs/src/copyright.rst | 2 +- .../contributing_deprecations.rst | 2 +- .../contributing_documentation_full.rst | 2 +- .../documenting/rest_guide.rst | 10 +++++----- docs/src/developers_guide/gitwash/LICENSE | 2 +- .../src/developers_guide/gitwash/git_links.inc | 4 ++-- docs/src/developers_guide/release.rst | 2 +- docs/src/further_topics/ugrid/data_model.rst | 2 +- docs/src/further_topics/um_files_loading.rst | 6 +++--- docs/src/index.rst | 2 +- docs/src/userguide/cube_maths.rst | 2 +- docs/src/userguide/glossary.rst | 4 ++-- docs/src/userguide/plotting_a_cube.rst | 6 +++--- docs/src/userguide/real_and_lazy_data.rst | 2 +- docs/src/whatsnew/1.0.rst | 8 ++++---- docs/src/whatsnew/1.13.rst | 2 +- docs/src/whatsnew/1.4.rst | 4 ++-- docs/src/whatsnew/1.6.rst | 2 +- docs/src/whatsnew/2.0.rst | 4 ++-- docs/src/whatsnew/2.3.rst | 14 +++++++------- docs/src/whatsnew/3.1.rst | 4 ++-- docs/src/whatsnew/3.2.rst | 4 ++-- docs/src/why_iris.rst | 6 +++--- lib/iris/analysis/maths.py 
| 2 +- lib/iris/common/mixin.py | 2 +- lib/iris/cube.py | 2 +- lib/iris/etc/palette/diverging/BrBG_11.txt | 4 ++-- lib/iris/etc/palette/diverging/PRGn_11.txt | 4 ++-- lib/iris/etc/palette/diverging/PiYG_11.txt | 4 ++-- lib/iris/etc/palette/diverging/PuOr_11.txt | 4 ++-- lib/iris/etc/palette/diverging/RdBu_11.txt | 4 ++-- lib/iris/etc/palette/diverging/RdGy_11.txt | 4 ++-- lib/iris/etc/palette/diverging/RdYlBu_11.txt | 4 ++-- lib/iris/etc/palette/diverging/RdYlGn_11.txt | 4 ++-- lib/iris/etc/palette/diverging/Spectral_11.txt | 4 ++-- lib/iris/etc/palette/qualitative/Accent_08.txt | 4 ++-- lib/iris/etc/palette/qualitative/Dark2_08.txt | 4 ++-- lib/iris/etc/palette/qualitative/Paired_12.txt | 4 ++-- .../etc/palette/qualitative/Pastel1_09.txt | 4 ++-- .../etc/palette/qualitative/Pastel2_08.txt | 4 ++-- lib/iris/etc/palette/qualitative/Set1_09.txt | 4 ++-- lib/iris/etc/palette/qualitative/Set2_08.txt | 4 ++-- lib/iris/etc/palette/qualitative/Set3_12.txt | 4 ++-- lib/iris/etc/palette/sequential/Blues_09.txt | 4 ++-- lib/iris/etc/palette/sequential/BuGn_09.txt | 4 ++-- lib/iris/etc/palette/sequential/BuPu_09.txt | 4 ++-- lib/iris/etc/palette/sequential/GnBu_09.txt | 4 ++-- lib/iris/etc/palette/sequential/Greens_09.txt | 4 ++-- lib/iris/etc/palette/sequential/Greys_09.txt | 4 ++-- lib/iris/etc/palette/sequential/OrRd_09.txt | 4 ++-- lib/iris/etc/palette/sequential/Oranges_09.txt | 4 ++-- lib/iris/etc/palette/sequential/PuBuGn_09.txt | 4 ++-- lib/iris/etc/palette/sequential/PuBu_09.txt | 4 ++-- lib/iris/etc/palette/sequential/PuRd_09.txt | 4 ++-- lib/iris/etc/palette/sequential/Purples_09.txt | 4 ++-- lib/iris/etc/palette/sequential/RdPu_09.txt | 4 ++-- lib/iris/etc/palette/sequential/Reds_09.txt | 4 ++-- lib/iris/etc/palette/sequential/YlGnBu_09.txt | 4 ++-- lib/iris/etc/palette/sequential/YlGn_09.txt | 4 ++-- lib/iris/etc/palette/sequential/YlOrBr_09.txt | 4 ++-- lib/iris/etc/palette/sequential/YlOrRd_09.txt | 4 ++-- lib/iris/experimental/raster.py | 4 ++-- 
lib/iris/experimental/regrid_conservative.py | 2 +- lib/iris/fileformats/netcdf/saver.py | 6 +++--- lib/iris/fileformats/nimrod.py | 2 +- lib/iris/io/__init__.py | 6 +++--- lib/iris/pandas.py | 2 +- lib/iris/symbols.py | 2 +- lib/iris/tests/test_io_init.py | 8 ++++---- lib/iris/tests/test_load.py | 2 +- .../geometry/test_geometry_area_weights.py | 2 +- .../ugrid/load/test_load_meshes.py | 4 ++-- .../tests/unit/time/test_PartialDateTime.py | 2 +- lib/iris/time.py | 2 +- tools/generate_std_names.py | 4 ++-- tools/release_do_nothing.py | 2 +- 82 files changed, 170 insertions(+), 170 deletions(-) diff --git a/CHANGES b/CHANGES index cdb2b64f84..b3916a97b6 100644 --- a/CHANGES +++ b/CHANGES @@ -373,7 +373,7 @@ Features added Incompatible changes -------------------- -* The Iris data model is now fully aligned with the `CF data model `_ . +* The Iris data model is now fully aligned with the `CF data model `_ . Iris remains file-format independent, as is the underlying CF data model. * Cube merging has been re-written for the new CF data model with the benefit that some corner cases are now better handled. Some users may find that their cubes, once merged, now have a smaller total shape and more intelligent handling of dimension coordinate picking. @@ -433,7 +433,7 @@ Features added given cubes (see :func:`iris.iterate.izip`). * Cell methods will now appear in the printout of a cube. * Supporting software dependency versions have been updated. Of particular note is matplotlib which has gone from version 1.0.1 - up to `1.1.0 `_ . This may have a small impact on + up to `1.1.0 `_ . This may have a small impact on some plot element placements. Incompatible changes diff --git a/README.md b/README.md index f857608718..233c0edd39 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ developer version or the most recent released alt="#showyourstripes Global 1850-2021">

-**Graphics and Lead Scientist**: [Ed Hawkins](http://www.met.reading.ac.uk/~ed/home/index.php), National Centre for Atmospheric Science, University of Reading. +**Graphics and Lead Scientist**: [Ed Hawkins](https://www.met.reading.ac.uk/~ed/home/index.php), National Centre for Atmospheric Science, University of Reading. **Data**: Berkeley Earth, NOAA, UK Met Office, MeteoSwiss, DWD, SMHI, UoR, Meteo France & ZAMG. diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index fab5bcb44e..4184629448 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -4,7 +4,7 @@ "project_url": "https://github.com/SciTools/iris", "repo": "..", "environment_type": "conda-delegated", - "show_commit_url": "http://github.com/scitools/iris/commit/", + "show_commit_url": "https://github.com/scitools/iris/commit/", "branches": ["upstream/main"], "benchmark_dir": "./benchmarks", diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index 9a9383650d..02b60f957d 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -21,7 +21,7 @@ :meth:`iris.plot.pcolormesh`, which call the underlying matplotlib functions of the same names (i.e., :obj:`matplotlib.pyplot.pcolor` and :obj:`matplotlib.pyplot.pcolormesh`). -See also: http://en.wikipedia.org/wiki/False_color#Pseudocolor. +See also: https://en.wikipedia.org/wiki/False_color#Pseudocolor. """ @@ -62,7 +62,7 @@ def main(): # Use a standard colour map which varies blue-white-red. # For suitable options, see the 'Diverging colormaps' section in: - # http://matplotlib.org/stable/gallery/color/colormap_reference.html + # https://matplotlib.org/stable/gallery/color/colormap_reference.html anom_cmap = "bwr" # Create a 'logarithmic' data normalization. 
diff --git a/docs/src/IEP/IEP001.adoc b/docs/src/IEP/IEP001.adoc index e43969f3ce..2daef2363a 100644 --- a/docs/src/IEP/IEP001.adoc +++ b/docs/src/IEP/IEP001.adoc @@ -162,12 +162,12 @@ There is a risk that this topic could bog down when dealing with non-standard ca * Boolean array indexing * Lambdas? * What to do about constrained loading? -* Relationship to http://scitools.org.uk/iris/docs/v1.9.2/iris/iris/cube.html#iris.cube.Cube.intersection[iris.cube.Cube.intersection]? +* Relationship to https://scitools.org.uk/iris/docs/v1.9.2/iris/iris/cube.html#iris.cube.Cube.intersection[iris.cube.Cube.intersection]? * Relationship to interpolation (especially nearest-neighbour)? ** e.g. What to do about values that don't exist? *** pandas throws a KeyError -*** xarray supports (several) nearest-neighbour schemes via http://xarray.pydata.org/en/stable/indexing.html#nearest-neighbor-lookups[`data.sel()`] -*** Apparently http://holoviews.org/[holoviews] does nearest-neighbour interpolation. +*** xarray supports (several) nearest-neighbour schemes via https://xarray.pydata.org/en/stable/indexing.html#nearest-neighbor-lookups[`data.sel()`] +*** Apparently https://holoviews.org/[holoviews] does nearest-neighbour interpolation. * multi-dimensional coordinate => unroll? * var_name only selection? `cube.vloc(t0=12)` * Orthogonal only? Or also independent? `cube.loc_points(lon=[1, 1, 5], lat=[31, 33, 32])` @@ -185,9 +185,9 @@ cube.interpolate( ## References . Iris - * http://scitools.org.uk/iris/docs/v1.9.2/iris/iris.html#iris.Constraint[iris.Constraint] - * http://scitools.org.uk/iris/docs/v1.9.2/userguide/subsetting_a_cube.html[Subsetting a cube] -. http://pandas.pydata.org/pandas-docs/stable/indexing.html[pandas indexing] -. http://xarray.pydata.org/en/stable/indexing.html[xarray indexing] -. http://legacy.python.org/dev/peps/pep-0472/[PEP 472 - Support for indexing with keyword arguments] -. 
http://nbviewer.jupyter.org/gist/rsignell-usgs/13d7ce9d95fddb4983d4cbf98be6c71d[Time slicing NetCDF or OPeNDAP datasets] - Rich Signell's xarray/iris comparison focussing on time handling and performance + * https://scitools.org.uk/iris/docs/v1.9.2/iris/iris.html#iris.Constraint[iris.Constraint] + * https://scitools.org.uk/iris/docs/v1.9.2/userguide/subsetting_a_cube.html[Subsetting a cube] +. https://pandas.pydata.org/pandas-docs/stable/indexing.html[pandas indexing] +. https://xarray.pydata.org/en/stable/indexing.html[xarray indexing] +. https://legacy.python.org/dev/peps/pep-0472/[PEP 472 - Support for indexing with keyword arguments] +. https://nbviewer.jupyter.org/gist/rsignell-usgs/13d7ce9d95fddb4983d4cbf98be6c71d[Time slicing NetCDF or OPeNDAP datasets] - Rich Signell's xarray/iris comparison focussing on time handling and performance diff --git a/docs/src/conf.py b/docs/src/conf.py index c59aca4909..ea12b83aaf 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -380,19 +380,19 @@ def _dotv(version): # url link checker. Some links work but report as broken, lets ignore them. 
# See https://www.sphinx-doc.org/en/1.2/config.html#options-for-the-linkcheck-builder linkcheck_ignore = [ - "http://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499", - "http://cfconventions.org", - "http://code.google.com/p/msysgit/downloads/list", - "http://effbot.org", + "https://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499", + "https://cfconventions.org", + "https://code.google.com/p/msysgit/downloads/list", + "https://effbot.org", "https://help.github.com", "https://docs.github.com", "https://github.com", - "http://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", - "http://scitools.github.com/cartopy", - "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf", + "https://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", + "https://scitools.github.com/cartopy", + "https://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf", "https://software.ac.uk/how-cite-software", - "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", - "http://www.nationalarchives.gov.uk/doc/open-government-licence", + "https://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", + "https://www.nationalarchives.gov.uk/doc/open-government-licence", "https://www.metoffice.gov.uk/", "https://biggus.readthedocs.io/", "https://stickler-ci.com/", diff --git a/docs/src/copyright.rst b/docs/src/copyright.rst index d5996fd999..513d281c07 100644 --- a/docs/src/copyright.rst +++ b/docs/src/copyright.rst @@ -29,7 +29,7 @@ are licensed under the UK's Open Government Licence: You may use and re-use the information featured on this website (not including logos) free of charge in any format or medium, under the terms of the - `Open Government Licence `_. + `Open Government Licence `_. We encourage users to establish hypertext links to this website. 
Any email enquiries regarding the use and re-use of this information resource should be diff --git a/docs/src/developers_guide/contributing_deprecations.rst b/docs/src/developers_guide/contributing_deprecations.rst index 0b22e2cbd2..8c5cb21feb 100644 --- a/docs/src/developers_guide/contributing_deprecations.rst +++ b/docs/src/developers_guide/contributing_deprecations.rst @@ -7,7 +7,7 @@ If you need to make a backwards-incompatible change to a public API [#public-api]_ that has been included in a release (e.g. deleting a method), then you must first deprecate the old behaviour in at least one release, before removing/updating it in the next -`major release `_. +`major release `_. Adding a Deprecation diff --git a/docs/src/developers_guide/contributing_documentation_full.rst b/docs/src/developers_guide/contributing_documentation_full.rst index df850cb2c4..5cb5269fa1 100755 --- a/docs/src/developers_guide/contributing_documentation_full.rst +++ b/docs/src/developers_guide/contributing_documentation_full.rst @@ -101,7 +101,7 @@ run:: See :data:`iris.cube.Cube.data` for an example of using the `doctest`_ approach. -.. _doctest: http://www.sphinx-doc.org/en/stable/ext/doctest.html +.. _doctest: https://www.sphinx-doc.org/en/stable/ext/doctest.html The hyperlinks in the documentation can be checked automatically. If there is a link that is known to work it can be excluded from the checks by diff --git a/docs/src/developers_guide/documenting/rest_guide.rst b/docs/src/developers_guide/documenting/rest_guide.rst index c4330b1e63..9e8c1107b0 100644 --- a/docs/src/developers_guide/documenting/rest_guide.rst +++ b/docs/src/developers_guide/documenting/rest_guide.rst @@ -15,14 +15,14 @@ source format. 
This guide will cover some of the more frequently used advanced reST markup syntaxes, for the basics of reST the following links may be useful: * https://www.sphinx-doc.org/en/master/usage/restructuredtext/ -* http://packages.python.org/an_example_pypi_project/sphinx.html +* https://packages.python.org/an_example_pypi_project/sphinx.html -Reference documentation for reST can be found at http://docutils.sourceforge.net/rst.html. +Reference documentation for reST can be found at https://docutils.sourceforge.net/rst.html. Creating Links -------------- -Basic links can be created with ```Text of the link `_`` -which will look like `Text of the link `_ +Basic links can be created with ```Text of the link `_`` +which will look like `Text of the link `_ Documents in the same project can be cross referenced with the syntax @@ -41,4 +41,4 @@ syntax ``:py:class:`zipfile.ZipFile``` which will result in links such as :py:class:`zipfile.ZipFile` and :py:class:`numpy.ndarray`. -.. _reST: http://en.wikipedia.org/wiki/ReStructuredText +.. _reST: https://en.wikipedia.org/wiki/ReStructuredText diff --git a/docs/src/developers_guide/gitwash/LICENSE b/docs/src/developers_guide/gitwash/LICENSE index 0ea9a5957b..cd8441c161 100644 --- a/docs/src/developers_guide/gitwash/LICENSE +++ b/docs/src/developers_guide/gitwash/LICENSE @@ -3,7 +3,7 @@ ========= We release the documents under the Creative Commons attribution license: -http://creativecommons.org/licenses/by/3.0/ +https://creativecommons.org/licenses/by/3.0/ We release the code under the simplified BSD license: diff --git a/docs/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc index bf20d13e5f..3ced13703f 100644 --- a/docs/src/developers_guide/gitwash/git_links.inc +++ b/docs/src/developers_guide/gitwash/git_links.inc @@ -8,11 +8,11 @@ __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... -.. _git: http://git-scm.com/ +.. _git: https://git-scm.com/ .. 
_github: https://github.com .. _github help: https://help.github.com .. _git documentation: https://git-scm.com/docs -.. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html +.. _linux git workflow: https://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html .. |emdash| unicode:: U+02014 diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 97d7918148..c7ce230204 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -280,7 +280,7 @@ For further details on how to test Iris, see :ref:`developer_running_tests`. .. _rc_iris: https://anaconda.org/conda-forge/iris/labels .. _Generating Distribution Archives: https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives .. _Packaging Your Project: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project -.. _latest CF standard names: http://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml +.. _latest CF standard names: https://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml .. _setuptools-scm: https://github.com/pypa/setuptools_scm .. _Semantic Versioning: https://semver.org/ .. _PEP 440: https://peps.python.org/pep-0440/ diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst index 0b4334e0f0..cad461340d 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -38,7 +38,7 @@ The Detail (e.g. Inkscape). They were originally made in MS PowerPoint. 
Uses the IBM Colour Blind Palette (see - http://ibm-design-language.eu-de.mybluemix.net/design/language/resources/color-library + https://ibm-design-language.eu-de.mybluemix.net/design/language/resources/color-library ) Structured Grids (the old world) diff --git a/docs/src/further_topics/um_files_loading.rst b/docs/src/further_topics/um_files_loading.rst index 9d9393f16d..c5238e6b70 100644 --- a/docs/src/further_topics/um_files_loading.rst +++ b/docs/src/further_topics/um_files_loading.rst @@ -31,14 +31,14 @@ Notes: #. Iris treats Fieldsfile data almost exactly as if it were PP -- i.e. it treats each field's lookup table entry like a PP header. #. The Iris data model is based on - `NetCDF CF conventions `_, so most of this can + `NetCDF CF conventions `_, so most of this can also be seen as a metadata translation between PP and CF terms, but it is easier to discuss in terms of Iris elements. For details of Iris terms (cubes, coordinates, attributes), refer to :ref:`Iris data structures `. -For details of CF conventions, see http://cfconventions.org/. +For details of CF conventions, see https://cfconventions.org/. Overview of Loading Process --------------------------- @@ -335,7 +335,7 @@ Time Information In Iris (as in CF) times and time intervals are both expressed as simple numbers, following the approach of the -`UDUNITS project `_. +`UDUNITS project `_. These values are stored as cube coordinates, where the scaling and calendar information is contained in the :attr:`~iris.coords.Coord.units` property. 
diff --git a/docs/src/index.rst b/docs/src/index.rst index b353406f58..a9bf76fc96 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -8,7 +8,7 @@ Iris **A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data.** -Iris implements a data model based on the `CF conventions `_ +Iris implements a data model based on the `CF conventions `_ giving you a powerful, format-agnostic interface for working with your data. It excels when working with multi-dimensional Earth Science data, where tabular representations become unwieldy and inefficient. diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst index 56a2041bd3..79c91ca61b 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/userguide/cube_maths.rst @@ -115,7 +115,7 @@ Notice that the calculation of the *anomaly* involves subtracting a because cube broadcasting is performed during cube arithmetic operations. Cube broadcasting follows similar broadcasting rules as -`NumPy `_, but +`NumPy `_, but the additional richness of Iris coordinate meta-data provides an enhanced capability beyond the basic broadcasting behaviour of NumPy. diff --git a/docs/src/userguide/glossary.rst b/docs/src/userguide/glossary.rst index 5c24f03372..6ab93125bd 100644 --- a/docs/src/userguide/glossary.rst +++ b/docs/src/userguide/glossary.rst @@ -22,7 +22,7 @@ Glossary This also forms the data model which iris is based on. | **Related:** :term:`NetCDF Format` - | **More information:** `CF Conventions `_ + | **More information:** `CF Conventions `_ | Coordinate @@ -188,7 +188,7 @@ Glossary Standard Name A name describing a :term:`phenomenon`, one from a fixed list - defined at `CF Standard Names `_. + defined at `CF Standard Names `_. 
| **Related:** :term:`Long Name` **|** :term:`Cube` | **More information:** :doc:`iris_cubes` diff --git a/docs/src/userguide/plotting_a_cube.rst b/docs/src/userguide/plotting_a_cube.rst index 70a1fbba91..f152690835 100644 --- a/docs/src/userguide/plotting_a_cube.rst +++ b/docs/src/userguide/plotting_a_cube.rst @@ -209,7 +209,7 @@ the temperature at some latitude cross-sections. that any useful functions or variables defined within the script can be imported into other scripts without running all of the code and thus creating an unwanted plot. This is discussed in more detail at - ``_. + ``_. In order to run this example, you will need to copy the code into a file and run it using ``python my_file.py``. @@ -285,7 +285,7 @@ These colour schemes are freely available under the following licence:: Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -293,7 +293,7 @@ These colour schemes are freely available under the following licence:: specific language governing permissions and limitations under the License. To include a reference in a journal article or report please refer to -`section 5 `_ +`section 5 `_ in the citation guidance provided by Cynthia Brewer. For adding citations to Iris plots, see :ref:`brewer-cite` (below). 
diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst index e4c041886c..2b3ecf9e64 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/userguide/real_and_lazy_data.rst @@ -233,7 +233,7 @@ both Iris cubes and coordinates, and for computing deferred operations on lazy a Dask provides processing options to control how deferred operations on lazy arrays are computed. This is provided via the ``dask.set_options`` interface. See the -`dask documentation `_ +`dask documentation `_ for more information on setting dask processing options. diff --git a/docs/src/whatsnew/1.0.rst b/docs/src/whatsnew/1.0.rst index c256c33566..a2456c12db 100644 --- a/docs/src/whatsnew/1.0.rst +++ b/docs/src/whatsnew/1.0.rst @@ -83,7 +83,7 @@ CF Name For convenience, Iris also includes the :class:`~iris.coord_systems.OSGB` class which provides a simple way to create the transverse Mercator coordinate system used by the British -`Ordnance Survey `_. +`Ordnance Survey `_. .. _whats-new-cartopy: @@ -92,7 +92,7 @@ Using Cartopy for Mapping in Matplotlib --------------------------------------- The underlying map drawing package has now been updated to use -`Cartopy `_. Cartopy provides a +`Cartopy `_. Cartopy provides a highly flexible set of mapping tools, with a consistent, intuitive interface. As yet it doesn't have feature-parity with basemap, but its goal is to make maps "just work", making it the perfect complement to Iris. @@ -132,7 +132,7 @@ interface: and :func:`matplotlib.pyplot.gca` should be used instead. For more examples of what can be done with Cartopy, see the Iris gallery and -`Cartopy's documentation `_. +`Cartopy's documentation `_. Hybrid-Pressure @@ -202,7 +202,7 @@ function. The recommended text for the Cynthia Brewer citation is provided by :data:`iris.plot.BREWER_CITE`. 
To include a reference in a journal article or report please refer to -`section 5 `_ +`section 5 `_ in the citation guidance provided by Cynthia Brewer. diff --git a/docs/src/whatsnew/1.13.rst b/docs/src/whatsnew/1.13.rst index 028c298505..4a2ecd8dbe 100644 --- a/docs/src/whatsnew/1.13.rst +++ b/docs/src/whatsnew/1.13.rst @@ -51,7 +51,7 @@ Bug Fixes * The CF conventions state that certain ``formula_terms`` terms may be omitted and assumed to be zero - (http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#dimensionless-v-coord) + (https://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#dimensionless-v-coord) so Iris now allows factories to be constructed with missing terms. * In the User Guide's contour plot example, clabel inline is set to be False diff --git a/docs/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst index 912a1e3bad..24a98488af 100644 --- a/docs/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -58,7 +58,7 @@ Features * Use the latest release of Cartopy, v0.8.0. -.. _OPeNDAP: http://www.opendap.org +.. _OPeNDAP: https://www.opendap.org .. _exp-regrid: Experimental Regridding Enhancements @@ -132,7 +132,7 @@ Cubes can now be loaded directly from the internet, via OPeNDAP_. For example:: - cubes = iris.load("http://geoport.whoi.edu/thredds/dodsC/bathy/gom15") + cubes = iris.load("https://geoport.whoi.edu/thredds/dodsC/bathy/gom15") .. _geotiff_export: diff --git a/docs/src/whatsnew/1.6.rst b/docs/src/whatsnew/1.6.rst index f1c50a3f08..4b179b67d6 100644 --- a/docs/src/whatsnew/1.6.rst +++ b/docs/src/whatsnew/1.6.rst @@ -40,7 +40,7 @@ Features iris.Constraint(time=lambda cell: cell.point.month != 1 and cell.point.day != 1) Note that, :class:`iris.Future` also supports a - `context manager `_ + `context manager `_ which allows multiple sections of code to execute with different run-time behaviour. 
diff --git a/docs/src/whatsnew/2.0.rst b/docs/src/whatsnew/2.0.rst index 4ef50a4101..1ee159c662 100644 --- a/docs/src/whatsnew/2.0.rst +++ b/docs/src/whatsnew/2.0.rst @@ -295,6 +295,6 @@ Documentation .. _Biggus: https://biggus.readthedocs.io/en/latest/ -.. _Dask: http://dask.pydata.org/en/latest/ +.. _Dask: https://dask.pydata.org/en/latest/ .. _iris_grib: https://github.com/SciTools/iris-grib/ -.. _schedulers: http://dask.pydata.org/en/latest/scheduler-overview.html +.. _schedulers: https://dask.pydata.org/en/latest/scheduler-overview.html diff --git a/docs/src/whatsnew/2.3.rst b/docs/src/whatsnew/2.3.rst index 9220bb89da..bec45a6603 100644 --- a/docs/src/whatsnew/2.3.rst +++ b/docs/src/whatsnew/2.3.rst @@ -29,7 +29,7 @@ Features .. admonition:: Climatological Coordinate Support Iris can now load, store and save `NetCDF climatological coordinates - `_. Any cube time coordinate can be marked as a climatological time axis using the boolean property: ``climatological``. The climatological bounds are stored in the @@ -41,7 +41,7 @@ Features coordinate's ``bounds`` property are written to a NetCDF boundary variable called '_bounds'. These are in place of a standard 'bounds' attribute and accompanying boundary variable. See below - for an `example adapted from CF conventions `_: @@ -116,7 +116,7 @@ Features * New coordinate system: :class:`iris.coord_systems.Geostationary`, including load and save support, based on the `CF Geostationary projection - definition `_. * :class:`iris.coord_systems.VerticalPerspective` can now be saved to and @@ -126,7 +126,7 @@ Features :class:`iris.analysis.PointInCell` to make this regridding scheme public * Iris now supports standard name modifiers. See - `Appendix C, Standard Name Modifiers `_ + `Appendix C, Standard Name Modifiers `_ for more information. * :meth:`iris.cube.Cube.remove_cell_measure` now also allows removal of a cell @@ -138,10 +138,10 @@ Features realisation of the original cube data. 
* Iris now supports NetCDF Climate and Forecast (CF) Metadata Conventions 1.7 - (see `CF 1.7 Conventions Document `_ for more information) + (see `CF 1.7 Conventions Document `_ for more information) * Updated standard name support to - `CF standard name table version 70, 2019-12-10 `_ + `CF standard name table version 70, 2019-12-10 `_ * Updated UM STASH translations to `metarelate/metOcean commit 448f2ef, 2019-11-29 `_ @@ -164,7 +164,7 @@ Bugs Fixed points. * :class:`iris.coord_systems.VerticalPerspective` coordinate system now uses - the `CF Vertical perspective definition `_; had been erroneously using Geostationary. diff --git a/docs/src/whatsnew/3.1.rst b/docs/src/whatsnew/3.1.rst index 744543f514..02e06bb532 100644 --- a/docs/src/whatsnew/3.1.rst +++ b/docs/src/whatsnew/3.1.rst @@ -281,7 +281,7 @@ This document explains the changes made to Iris for this release (:pull:`4198`) #. `@lbdreyer`_ updated the CF standard name table to the latest version: - `v77 `_. + `v77 `_. (:pull:`4282`) #. `@jamesp`_ updated a test to the latest numpy version (:pull:`3977`) @@ -322,4 +322,4 @@ This document explains the changes made to Iris for this release .. _Python 3.6: https://www.python.org/downloads/release/python-360/ .. _Python 3.8: https://www.python.org/downloads/release/python-380/ .. _README.md: https://github.com/SciTools/iris#----- -.. _xxhash: http://cyan4973.github.io/xxHash/ +.. _xxhash: https://cyan4973.github.io/xxHash/ diff --git a/docs/src/whatsnew/3.2.rst b/docs/src/whatsnew/3.2.rst index 87a85f9061..ce544a5ecc 100644 --- a/docs/src/whatsnew/3.2.rst +++ b/docs/src/whatsnew/3.2.rst @@ -332,7 +332,7 @@ v3.2.1 (11 Mar 2022) =========== #. `@trexfeathers`_ set the linkcheck to ignore - http://www.nationalarchives.gov.uk/doc/open-government-licence since this + https://www.nationalarchives.gov.uk/doc/open-government-licence since this always works locally, but never within CI. (:pull:`4307`) #. 
`@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if @@ -399,7 +399,7 @@ v3.2.1 (11 Mar 2022) Whatsnew resources in alphabetical order: .. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ +.. _UGRID: https://ugrid-conventions.github.io/ugrid-conventions/ .. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid .. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba .. _sort-all: https://github.com/aio-libs/sort-all diff --git a/docs/src/why_iris.rst b/docs/src/why_iris.rst index 82b791b4bd..6c9b5fb7fb 100644 --- a/docs/src/why_iris.rst +++ b/docs/src/why_iris.rst @@ -6,12 +6,12 @@ Why Iris **A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data.** -Iris implements a data model based on the `CF conventions `_ +Iris implements a data model based on the `CF conventions `_ giving you a powerful, format-agnostic interface for working with your data. It excels when working with multi-dimensional Earth Science data, where tabular representations become unwieldy and inefficient. -`CF Standard names `_, +`CF Standard names `_, `units `_, and coordinate metadata are built into Iris, giving you a rich and expressive interface for maintaining an accurate representation of your data. Its treatment of data and @@ -33,7 +33,7 @@ A number of file formats are recognised by Iris, including CF-compliant NetCDF, GRIB, and PP, and it has a plugin architecture to allow other formats to be added seamlessly. -Building upon `NumPy `_ and +Building upon `NumPy `_ and `dask `_, Iris scales from efficient single-machine workflows right through to multi-core clusters and HPC. 
Interoperability with packages from the wider scientific Python ecosystem comes diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index a24203ba2a..d17d3ea93c 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -740,7 +740,7 @@ def apply_ufunc( ): """ Apply a `numpy universal function - `_ to a cube + `_ to a cube or pair of cubes. .. note:: Many of the numpy.ufunc have been implemented explicitly in Iris diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index a1b1e4647b..b6cd3132b5 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -68,7 +68,7 @@ class LimitedAttributeDict(dict): :data:`iris.common.mixin.LimitedAttributeDict.CF_ATTRS_FORBIDDEN` . All the forbidden attributes are amongst those listed in - `Appendix A of the CF Conventions: `_ + `Appendix A of the CF Conventions: `_ -- however, not *all* of them, since not all are interpreted by Iris. """ diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 8aa0b452d5..0018a5abe8 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1103,7 +1103,7 @@ def __setitem__(self, key, value): **However** a handful of "known normally global" cases, as defined by CF, go into ``.globals`` instead. At present these are : ('conventions', 'featureType', 'history', 'title'). - See `CF Conventions, Appendix A: `_ . + See `CF Conventions, Appendix A: `_ . """ # If an attribute of this name is already present, update that diff --git a/lib/iris/etc/palette/diverging/BrBG_11.txt b/lib/iris/etc/palette/diverging/BrBG_11.txt index 7243c178ae..bf1c566f81 100644 --- a/lib/iris/etc/palette/diverging/BrBG_11.txt +++ b/lib/iris/etc/palette/diverging/BrBG_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.329412 0.188235 0.019608 diff --git a/lib/iris/etc/palette/diverging/PRGn_11.txt b/lib/iris/etc/palette/diverging/PRGn_11.txt index 32fdee2871..e78d56e04d 100644 --- a/lib/iris/etc/palette/diverging/PRGn_11.txt +++ b/lib/iris/etc/palette/diverging/PRGn_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.250980 0.000000 0.294118 diff --git a/lib/iris/etc/palette/diverging/PiYG_11.txt b/lib/iris/etc/palette/diverging/PiYG_11.txt index b52c50acbc..826c48e361 100644 --- a/lib/iris/etc/palette/diverging/PiYG_11.txt +++ b/lib/iris/etc/palette/diverging/PiYG_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.556863 0.003922 0.321569 diff --git a/lib/iris/etc/palette/diverging/PuOr_11.txt b/lib/iris/etc/palette/diverging/PuOr_11.txt index 8e9c32b8d0..4785c4b536 100644 --- a/lib/iris/etc/palette/diverging/PuOr_11.txt +++ b/lib/iris/etc/palette/diverging/PuOr_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -17,7 +17,7 @@ # std_name: air_pressure_at_sea_level # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.176471 0.000000 0.294118 diff --git a/lib/iris/etc/palette/diverging/RdBu_11.txt b/lib/iris/etc/palette/diverging/RdBu_11.txt index 526132e2a0..f7da164953 100644 --- a/lib/iris/etc/palette/diverging/RdBu_11.txt +++ b/lib/iris/etc/palette/diverging/RdBu_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly, temperature # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.019608 0.188235 0.380392 diff --git a/lib/iris/etc/palette/diverging/RdGy_11.txt b/lib/iris/etc/palette/diverging/RdGy_11.txt index c8ade7f388..0b8ae55480 100644 --- a/lib/iris/etc/palette/diverging/RdGy_11.txt +++ b/lib/iris/etc/palette/diverging/RdGy_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.403922 0.000000 0.121569 diff --git a/lib/iris/etc/palette/diverging/RdYlBu_11.txt b/lib/iris/etc/palette/diverging/RdYlBu_11.txt index 84cc3dd2c2..5d799e8e77 100644 --- a/lib/iris/etc/palette/diverging/RdYlBu_11.txt +++ b/lib/iris/etc/palette/diverging/RdYlBu_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.647059 0.000000 0.149020 diff --git a/lib/iris/etc/palette/diverging/RdYlGn_11.txt b/lib/iris/etc/palette/diverging/RdYlGn_11.txt index f1d626d493..d17ff39177 100644 --- a/lib/iris/etc/palette/diverging/RdYlGn_11.txt +++ b/lib/iris/etc/palette/diverging/RdYlGn_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.647059 0.000000 0.149020 diff --git a/lib/iris/etc/palette/diverging/Spectral_11.txt b/lib/iris/etc/palette/diverging/Spectral_11.txt index 03eaa98f7f..f2a4447846 100644 --- a/lib/iris/etc/palette/diverging/Spectral_11.txt +++ b/lib/iris/etc/palette/diverging/Spectral_11.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: diverging # keyword: anomaly # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.619608 0.003922 0.258824 diff --git a/lib/iris/etc/palette/qualitative/Accent_08.txt b/lib/iris/etc/palette/qualitative/Accent_08.txt index 9b1f3c49c3..bb13fcb5a9 100644 --- a/lib/iris/etc/palette/qualitative/Accent_08.txt +++ b/lib/iris/etc/palette/qualitative/Accent_08.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Accent_08 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.498039 0.788235 0.498039 diff --git a/lib/iris/etc/palette/qualitative/Dark2_08.txt b/lib/iris/etc/palette/qualitative/Dark2_08.txt index bd33878e10..5d1599a31b 100644 --- a/lib/iris/etc/palette/qualitative/Dark2_08.txt +++ b/lib/iris/etc/palette/qualitative/Dark2_08.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Dark2_08 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.105882 0.619608 0.466667 diff --git a/lib/iris/etc/palette/qualitative/Paired_12.txt b/lib/iris/etc/palette/qualitative/Paired_12.txt index b4efea1c92..8b8154ff3b 100644 --- a/lib/iris/etc/palette/qualitative/Paired_12.txt +++ b/lib/iris/etc/palette/qualitative/Paired_12.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Paired_12 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.650980 0.807843 0.890196 diff --git a/lib/iris/etc/palette/qualitative/Pastel1_09.txt b/lib/iris/etc/palette/qualitative/Pastel1_09.txt index fb6ef6d0a8..042009995e 100644 --- a/lib/iris/etc/palette/qualitative/Pastel1_09.txt +++ b/lib/iris/etc/palette/qualitative/Pastel1_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Pastel1_09 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.984314 0.705882 0.682353 diff --git a/lib/iris/etc/palette/qualitative/Pastel2_08.txt b/lib/iris/etc/palette/qualitative/Pastel2_08.txt index 1fda519549..05b30fef0b 100644 --- a/lib/iris/etc/palette/qualitative/Pastel2_08.txt +++ b/lib/iris/etc/palette/qualitative/Pastel2_08.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Pastel2_08 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.701961 0.886275 0.803922 diff --git a/lib/iris/etc/palette/qualitative/Set1_09.txt b/lib/iris/etc/palette/qualitative/Set1_09.txt index 3dd2145930..a2f9f57c4b 100644 --- a/lib/iris/etc/palette/qualitative/Set1_09.txt +++ b/lib/iris/etc/palette/qualitative/Set1_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Set1_09 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.894118 0.101961 0.109804 diff --git a/lib/iris/etc/palette/qualitative/Set2_08.txt b/lib/iris/etc/palette/qualitative/Set2_08.txt index a643828a22..40cfa0d738 100644 --- a/lib/iris/etc/palette/qualitative/Set2_08.txt +++ b/lib/iris/etc/palette/qualitative/Set2_08.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Set2_08 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.400000 0.760784 0.647059 diff --git a/lib/iris/etc/palette/qualitative/Set3_12.txt b/lib/iris/etc/palette/qualitative/Set3_12.txt index 589352fc60..33bdaf372c 100644 --- a/lib/iris/etc/palette/qualitative/Set3_12.txt +++ b/lib/iris/etc/palette/qualitative/Set3_12.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Set3_12 # scheme: qualitative # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.552941 0.827451 0.780392 diff --git a/lib/iris/etc/palette/sequential/Blues_09.txt b/lib/iris/etc/palette/sequential/Blues_09.txt index 37c7e6082c..d489a46d61 100644 --- a/lib/iris/etc/palette/sequential/Blues_09.txt +++ b/lib/iris/etc/palette/sequential/Blues_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -16,7 +16,7 @@ # scheme: sequential # keyword: lwe_precipitation, convective_precipitation, stratiform_precipitation, precipitation_amount # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.968627 0.984314 1.000000 diff --git a/lib/iris/etc/palette/sequential/BuGn_09.txt b/lib/iris/etc/palette/sequential/BuGn_09.txt index 28b5bfc11f..cde85b422c 100644 --- a/lib/iris/etc/palette/sequential/BuGn_09.txt +++ b/lib/iris/etc/palette/sequential/BuGn_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: BuGn_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.968627 0.988235 0.992157 diff --git a/lib/iris/etc/palette/sequential/BuPu_09.txt b/lib/iris/etc/palette/sequential/BuPu_09.txt index 6e0596ec80..99fafcc208 100644 --- a/lib/iris/etc/palette/sequential/BuPu_09.txt +++ b/lib/iris/etc/palette/sequential/BuPu_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: BuPu_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.968627 0.988235 0.992157 diff --git a/lib/iris/etc/palette/sequential/GnBu_09.txt b/lib/iris/etc/palette/sequential/GnBu_09.txt index 0225f496e7..cb3fa28c4a 100644 --- a/lib/iris/etc/palette/sequential/GnBu_09.txt +++ b/lib/iris/etc/palette/sequential/GnBu_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: GnBu_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.968627 0.988235 0.941176 diff --git a/lib/iris/etc/palette/sequential/Greens_09.txt b/lib/iris/etc/palette/sequential/Greens_09.txt index 8900459b58..e338e6b9b0 100644 --- a/lib/iris/etc/palette/sequential/Greens_09.txt +++ b/lib/iris/etc/palette/sequential/Greens_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Greens_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.968627 0.988235 0.960784 diff --git a/lib/iris/etc/palette/sequential/Greys_09.txt b/lib/iris/etc/palette/sequential/Greys_09.txt index d76ec0e6a1..b8d047938f 100644 --- a/lib/iris/etc/palette/sequential/Greys_09.txt +++ b/lib/iris/etc/palette/sequential/Greys_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Greys_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 1.000000 1.000000 diff --git a/lib/iris/etc/palette/sequential/OrRd_09.txt b/lib/iris/etc/palette/sequential/OrRd_09.txt index 3e081719a7..3da55efc6f 100644 --- a/lib/iris/etc/palette/sequential/OrRd_09.txt +++ b/lib/iris/etc/palette/sequential/OrRd_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: OrRd_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 0.968627 0.925490 diff --git a/lib/iris/etc/palette/sequential/Oranges_09.txt b/lib/iris/etc/palette/sequential/Oranges_09.txt index 022be59e99..d5793340aa 100644 --- a/lib/iris/etc/palette/sequential/Oranges_09.txt +++ b/lib/iris/etc/palette/sequential/Oranges_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Oranges_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 0.960784 0.921569 diff --git a/lib/iris/etc/palette/sequential/PuBuGn_09.txt b/lib/iris/etc/palette/sequential/PuBuGn_09.txt index 264289cdba..38740edcf5 100644 --- a/lib/iris/etc/palette/sequential/PuBuGn_09.txt +++ b/lib/iris/etc/palette/sequential/PuBuGn_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: PuBuGn_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 0.968627 0.984314 diff --git a/lib/iris/etc/palette/sequential/PuBu_09.txt b/lib/iris/etc/palette/sequential/PuBu_09.txt index e07b960f0b..44f6c6f01b 100644 --- a/lib/iris/etc/palette/sequential/PuBu_09.txt +++ b/lib/iris/etc/palette/sequential/PuBu_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: PuBu_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 0.968627 0.984314 diff --git a/lib/iris/etc/palette/sequential/PuRd_09.txt b/lib/iris/etc/palette/sequential/PuRd_09.txt index 19589fb4ad..402584291a 100644 --- a/lib/iris/etc/palette/sequential/PuRd_09.txt +++ b/lib/iris/etc/palette/sequential/PuRd_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: PuRd_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.968627 0.956863 0.976471 diff --git a/lib/iris/etc/palette/sequential/Purples_09.txt b/lib/iris/etc/palette/sequential/Purples_09.txt index 338dd3d021..1b5811271c 100644 --- a/lib/iris/etc/palette/sequential/Purples_09.txt +++ b/lib/iris/etc/palette/sequential/Purples_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Purples_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 0.988235 0.984314 0.992157 diff --git a/lib/iris/etc/palette/sequential/RdPu_09.txt b/lib/iris/etc/palette/sequential/RdPu_09.txt index a21e4b9000..71054bf397 100644 --- a/lib/iris/etc/palette/sequential/RdPu_09.txt +++ b/lib/iris/etc/palette/sequential/RdPu_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: RdPu_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 0.968627 0.952941 diff --git a/lib/iris/etc/palette/sequential/Reds_09.txt b/lib/iris/etc/palette/sequential/Reds_09.txt index d03594e058..445001df64 100644 --- a/lib/iris/etc/palette/sequential/Reds_09.txt +++ b/lib/iris/etc/palette/sequential/Reds_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: Reds_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 0.960784 0.941176 diff --git a/lib/iris/etc/palette/sequential/YlGnBu_09.txt b/lib/iris/etc/palette/sequential/YlGnBu_09.txt index 733d1aebca..e1699f1f5b 100644 --- a/lib/iris/etc/palette/sequential/YlGnBu_09.txt +++ b/lib/iris/etc/palette/sequential/YlGnBu_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: YlGnBu_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 1.000000 0.850980 diff --git a/lib/iris/etc/palette/sequential/YlGn_09.txt b/lib/iris/etc/palette/sequential/YlGn_09.txt index 1242b90d59..efbecd5ea7 100644 --- a/lib/iris/etc/palette/sequential/YlGn_09.txt +++ b/lib/iris/etc/palette/sequential/YlGn_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: YlGn_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 1.000000 0.898039 diff --git a/lib/iris/etc/palette/sequential/YlOrBr_09.txt b/lib/iris/etc/palette/sequential/YlOrBr_09.txt index f6d93c5b9a..caf8886f83 100644 --- a/lib/iris/etc/palette/sequential/YlOrBr_09.txt +++ b/lib/iris/etc/palette/sequential/YlOrBr_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: YlOrBr_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 1.000000 0.898039 diff --git a/lib/iris/etc/palette/sequential/YlOrRd_09.txt b/lib/iris/etc/palette/sequential/YlOrRd_09.txt index b252f6f684..4d50f5034d 100644 --- a/lib/iris/etc/palette/sequential/YlOrRd_09.txt +++ b/lib/iris/etc/palette/sequential/YlOrRd_09.txt @@ -5,7 +5,7 @@ # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR @@ -15,7 +15,7 @@ # name: YlOrRd_09 # scheme: sequential # interpolate: off -# source: http://colorbrewer.org/ +# source: https://colorbrewer.org/ # type: RGB # 1.000000 1.000000 0.800000 diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 6fe12ea82a..5cc3b4f710 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -6,7 +6,7 @@ Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. -See also: `GDAL - Geospatial Data Abstraction Library `_. +See also: `GDAL - Geospatial Data Abstraction Library `_. TODO: If this module graduates from experimental the (optional) GDAL dependency should be added to INSTALL @@ -120,7 +120,7 @@ def export_geotiff(cube, fname): .. 
note:: For more details on GeoTiff specification and PixelIsArea, see: - http://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2 + https://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2 """ wmsg = ( diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index 83e65f89af..ccea4277d3 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -105,7 +105,7 @@ def _make_esmpy_field( # NOTE: we don't care about Iris' idea of where the points 'really' are # *but* ESMF requires the data in the CENTER for conservative regrid, # according to the documentation : - # - http://www.earthsystemmodeling.org/ + # - https://www.earthsystemmodeling.org/ # esmf_releases/public/last/ESMF_refdoc.pdf # - section 22.2.3 : ESMF_REGRIDMETHOD # diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index b0bff313e9..5bfc8754fb 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -612,7 +612,7 @@ def write( using `numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits` is determined so that a precision of 0.1 is retained (in this case `bits=4`). From - `here `__: + `here `__: "least_significant_digit -- power of ten of the smallest decimal place in unpacked data that is a reliable value". Default is `None`, or no quantization, or 'lossless' compression. @@ -2765,7 +2765,7 @@ def save( Used to manually specify the HDF5 chunksizes for each dimension of the variable. A detailed discussion of HDF chunking and I/O performance is available - `here `__. + `here `__. Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. @@ -2795,7 +2795,7 @@ def save( of packing parameters as described below or an iterable of such types, strings, or dicts. 
This provides support for netCDF data packing as described in - `here `__ + `here `__ If this argument is a type (or type string), appropriate values of scale_factor and add_offset will be automatically calculated based on `cube.data` and possible masking. For more control, pass a dict with diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index d4e86502bd..f4033cfb1d 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -173,7 +173,7 @@ class NimrodField: References: Met Office (2003): Met Office Rain Radar Data from the NIMROD System. NCAS British Atmospheric Data Centre, date of citation. - http://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499 + https://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499 """ diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 87725789e5..08586c81b7 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -99,8 +99,8 @@ def decode_uri(uri, default="file"): Examples -------- >>> from iris.io import decode_uri - >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b')) - ('http', '//www.thing.com:8080/resource?id=a:b') + >>> print(decode_uri('https://www.thing.com:8080/resource?id=a:b')) + ('https', '//www.thing.com:8080/resource?id=a:b') >>> print(decode_uri('file:///data/local/dataZoo/...')) ('file', '///data/local/dataZoo/...') @@ -127,7 +127,7 @@ def decode_uri(uri, default="file"): if isinstance(uri, str): # make sure scheme has at least 2 letters to avoid windows drives # put - last in the brackets so it refers to the character, not a range - # reference on valid schemes: http://tools.ietf.org/html/std66#section-3.1 + # reference on valid schemes: https://tools.ietf.org/html/std66#section-3.1 match = re.match(r"^([a-zA-Z][a-zA-Z0-9+.-]+):(.+)", uri) if match: scheme = match.group(1) diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 535bed3a64..05447c0c48 100644 --- a/lib/iris/pandas.py +++ 
b/lib/iris/pandas.py @@ -5,7 +5,7 @@ """ Provide conversion to and from Pandas data structures. -See also: http://pandas.pydata.org/ +See also: https://pandas.pydata.org/ """ import datetime diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index 7bbbca83a9..2a8f447f3a 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -231,7 +231,7 @@ def _wedge_fix(wedge_path): """ A dictionary mapping WMO cloud cover codes to their corresponding symbol. -See http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf +See https://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf Part II, Appendix II.4, Graphical Representation of Data, Analyses and Forecasts diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index 852944eee5..57b6c2a963 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -34,9 +34,9 @@ def test_decode_uri__str(self): uri[:4], uri[5:], ), - (uri := "http://www.somehost.com:8080/resource/thing.grib"): ( - uri[:4], - uri[5:], + (uri := "https://www.somehost.com:8080/resource/thing.grib"): ( + uri[:5], + uri[6:], ), (uri := "/data/local/someDir/2013-11-25T13:49:17.632797"): ( "file", @@ -151,7 +151,7 @@ def test_open_dap(self): # tests that *ANY* http or https URL is seen as an OPeNDAP service. # This may need to change in the future if other protocols are # supported. 
- DAP_URI = "http://geoport.whoi.edu/thredds/dodsC/bathy/gom15" + DAP_URI = "https://geoport.whoi.edu/thredds/dodsC/bathy/gom15" a = iff.FORMAT_AGENT.get_spec(DAP_URI, None) self.assertEqual(a.name, "NetCDF OPeNDAP") diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 1189f74b55..93ea4ef913 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -151,7 +151,7 @@ def test_path_object(self): class TestOPeNDAP(tests.IrisTest): def setUp(self): - self.url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" + self.url = "https://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" def test_load_http_called(self): # Check that calling iris.load_* with an http URI triggers a call to diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index ae0e47292d..b83278c3b0 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -139,7 +139,7 @@ def test_distinct_xy_bounds_pole(self): miny = 84.99998474121094 maxy = 99.99998474121094 geometry = shapely.geometry.box(minx, miny, maxx, maxy) - # see http://stackoverflow.com/a/3892301 to assert warnings + # see https://stackoverflow.com/a/3892301 to assert warnings with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # always trigger all warnings weights = geometry_area_weights(cube, geometry) diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index 1ec3e65a97..40d839951b 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -210,13 +210,13 @@ def setUp(self): ) def test_http(self): - url = "http://foo" + url = "https://foo" with PARSE_UGRID_ON_LOAD.context(): _ = load_meshes(url) 
self.format_agent_mock.assert_called_with(url, None) def test_mixed_sources(self): - url = "http://foo" + url = "https://foo" file = TMP_DIR / f"{uuid4()}.nc" file.touch() glob = f"{TMP_DIR}/*.nc" diff --git a/lib/iris/tests/unit/time/test_PartialDateTime.py b/lib/iris/tests/unit/time/test_PartialDateTime.py index 8223f4a518..6ed00943b9 100644 --- a/lib/iris/tests/unit/time/test_PartialDateTime.py +++ b/lib/iris/tests/unit/time/test_PartialDateTime.py @@ -57,7 +57,7 @@ def test_empty(self): class Test_timetuple(tests.IrisTest): def test_exists(self): # Check that the PartialDateTime class implements a timetuple (needed - # because of http://bugs.python.org/issue8005). + # because of https://bugs.python.org/issue8005). pd = PartialDateTime(*list(range(7))) self.assertTrue(hasattr(pd, "timetuple")) diff --git a/lib/iris/time.py b/lib/iris/time.py index 6ba85a0051..f2bc4a08ce 100644 --- a/lib/iris/time.py +++ b/lib/iris/time.py @@ -42,7 +42,7 @@ class PartialDateTime: #: A dummy value provided as a workaround to allow comparisons with #: :class:`datetime.datetime`. - #: See http://bugs.python.org/issue8005. + #: See https://bugs.python.org/issue8005. #: NB. It doesn't even matter what this value is. timetuple = None diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 8e3b24aac6..85372b7cc7 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -12,8 +12,8 @@ By default, Iris will use the source XML file: etc/cf-standard-name-table.xml as obtained from: - http://cfconventions.org/standard-names.html - E.G. http://cfconventions.org/Data/cf-standard-names/78/src/cf-standard-name-table.xml + https://cfconventions.org/standard-names.html + E.G. https://cfconventions.org/Data/cf-standard-names/78/src/cf-standard-name-table.xml - N.B. no fixed 'latest' url is provided. 
""" diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index 94f2d96829..634e6a65a2 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -186,7 +186,7 @@ def update_standard_names(first_in_series: bool) -> None: "(This is used during build to automatically generate the sourcefile " "``lib/iris/std_names.py``).\n" "Latest standard names:\n" - 'wget "http://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml";' + 'wget "https://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml";' ) _wait_for_done(message) From c11d8bbffa65e3bc8ed2306b6fd6099984ed8ba1 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 11 Dec 2023 17:14:26 +0000 Subject: [PATCH 104/134] Adopt inital noop ruff linter (#5623) * Adopt inital noop ruff linter * use extend-exclude * add whatsnew entry --- .pre-commit-config.yaml | 7 + .ruff.toml | 240 +++++++++++++++++++++++++++++++++++ MANIFEST.in | 1 + README.md | 18 +-- docs/src/whatsnew/latest.rst | 3 + pyproject.toml | 31 +++++ 6 files changed, 291 insertions(+), 9 deletions(-) create mode 100644 .ruff.toml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cbad42b83a..cd52482df0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,6 +28,13 @@ repos: # Don't commit to main branch. 
- id: no-commit-to-branch +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.1.6" + hooks: + - id: ruff + types: [file, python] + args: [--fix, --show-fixes] + - repo: https://github.com/codespell-project/codespell rev: "v2.2.6" hooks: diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000000..f3f9f1d3f0 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,240 @@ +extend = "pyproject.toml" + +lint.ignore = [ + # NOTE: To find a rule code to fix, run: + # ruff --select="ALL" --statistics lib/iris/ + + # Pyflakes (F) + # https://docs.astral.sh/ruff/rules/#pyflakes-f + "F", + + # pycodestyle (E, W) + # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w + "E", + "W", + + # mccabe (C90) + # https://docs.astral.sh/ruff/rules/#mccabe-c90 + "C90", + + # isort (I) + # https://docs.astral.sh/ruff/rules/#isort-i + "I", + + # pep8-naming (N) + # https://docs.astral.sh/ruff/rules/#pep8-naming-n + "N", + + # pydocstyle (D) + # https://docs.astral.sh/ruff/rules/#pydocstyle-d + "D", + + # pyupgrade (UP) + # https://docs.astral.sh/ruff/rules/#pyupgrade-up + "UP", + + # flake8-2020 (YTT) + # https://docs.astral.sh/ruff/rules/#flake8-2020-ytt + "YTT", + + # flake8-annotations (ANN) + # https://docs.astral.sh/ruff/rules/#flake8-annotations-ann + "ANN", + + # flake8-async (ASYNC) + # https://docs.astral.sh/ruff/rules/#flake8-async-async + "ASYNC", + + # flake8-trio (TRIO) + # https://docs.astral.sh/ruff/rules/#flake8-trio-trio + "TRIO", + + # flake8-bandit (S) + # https://docs.astral.sh/ruff/rules/#flake8-bandit-s + "S", + + # flake8-blind-except (BLE) + # https://docs.astral.sh/ruff/rules/#flake8-blind-except-ble + "BLE", + + # flake8-boolean-trap (FBT) + # https://docs.astral.sh/ruff/rules/#flake8-boolean-trap-fbt + "FBT", + + # flake8-bugbear (B) + # https://docs.astral.sh/ruff/rules/#flake8-bugbear-b + "B", + + # flake8-builtins (A) + # https://docs.astral.sh/ruff/rules/#flake8-builtins-a + "A", + + # flake8-commas (COM) + # 
https://docs.astral.sh/ruff/rules/#flake8-commas-com + "COM", + + # flake8-copyright (CPY) + # https://docs.astral.sh/ruff/rules/#flake8-copyright-cpy + "CPY", + + # flake8-comprehensions (C4) + # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 + "C4", + + # flake8-datetimez (DTZ) + # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz + "DTZ", + + # flake8-debugger (T10) + # https://docs.astral.sh/ruff/rules/#flake8-debugger-t10 + "T10", + + # flake8-django (DJ) + # https://docs.astral.sh/ruff/rules/#flake8-django-dj + "DJ", + + # flake8-errmsg (EM) + # https://docs.astral.sh/ruff/rules/#flake8-errmsg-em + "EM", + + # flake8-executable (EXE) + # https://docs.astral.sh/ruff/rules/#flake8-executable-exe + "EXE", + + # flake8-future-annotations (FA) + # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa + "FA", + + # flake8-implicit-str-concat (ISC) + # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc + "ISC", + + # flake8-import-conventions (ICN) + # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn + "ICN", + + # flake8-logging-format (G) + # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g + "G", + + # flake8-no-pep420 (INP) + # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp + "INP", + + # flake8-pie (PIE) + # https://docs.astral.sh/ruff/rules/#flake8-pie-pie + "PIE", + + # flake8-print (T20) + # https://docs.astral.sh/ruff/rules/#flake8-print-t20 + "T20", + + # flake8-pyi (PYI) + # https://docs.astral.sh/ruff/rules/#flake8-pyi-pyi + "PYI", + + # flake8-pytest-style (PT) + # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt + "PT", + + # flake8-quotes (Q) + # https://docs.astral.sh/ruff/rules/#flake8-quotes-q + "Q", + + # flake8-raise (RSE) + # https://docs.astral.sh/ruff/rules/#flake8-raise-rse + "RSE", + + # flake8-return (RET) + # https://docs.astral.sh/ruff/rules/#flake8-return-ret + "RET", + + # flake8-self (SLF) + # https://docs.astral.sh/ruff/rules/#flake8-self-slf + 
"SLF", + + # flake8-slots (SLOT) + # https://docs.astral.sh/ruff/rules/#flake8-slots-slot + "SLOT", + + # flake8-simplify (SIM) + # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim + "SIM", + + # flake8-tidy-imports (TID) + # https://docs.astral.sh/ruff/rules/#flake8-tidy-imports-tid + "TID", + + # flake8-type-checking (TCH) + # https://docs.astral.sh/ruff/rules/#flake8-type-checking-tch + "TCH", + + # flake8-gettext (INT) + # https://docs.astral.sh/ruff/rules/#flake8-gettext-int + "INT", + + # flake8-unused-arguments (ARG) + # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg + "ARG", + + # flake8-use-pathlib (PTH) + # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth + "PTH", + + # flake8-todos (TD) + # https://docs.astral.sh/ruff/rules/#flake8-todos-td + "TD", + + # flake8-fixme (FIX) + # https://docs.astral.sh/ruff/rules/#flake8-fixme-fix + "FIX", + + # eradicate (ERA) + # https://docs.astral.sh/ruff/rules/#eradicate-era + "ERA", + + # pandas-vet (PD) + # https://docs.astral.sh/ruff/rules/#pandas-vet-pd + "PD", + + # pygrep-hooks (PGH) + # https://docs.astral.sh/ruff/rules/#pygrep-hooks-pgh + "PGH", + + # Pylint (PL) + # https://docs.astral.sh/ruff/rules/#pylint-pl + "PL", + + # tryceratops (TRY) + # https://docs.astral.sh/ruff/rules/#tryceratops-try + "TRY", + + # flynt (FLY) + # https://docs.astral.sh/ruff/rules/#flynt-fly + "FLY", + + # NumPy-specific rules (NPY) + # https://docs.astral.sh/ruff/rules/#numpy-specific-rules-npy + "NPY", + + # Airflow (AIR) + # https://docs.astral.sh/ruff/rules/#airflow-air + "AIR", + + # Perflint (PERF) + # https://docs.astral.sh/ruff/rules/#perflint-perf + "PERF", + + # refurb (FURB) + # https://docs.astral.sh/ruff/rules/#refurb-furb + "FURB", + + # flake8-logging (LOG) + # https://docs.astral.sh/ruff/rules/#flake8-logging-log + "LOG", + + # Ruff-specific rules (RUF) + # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf + "RUF", +] + diff --git a/MANIFEST.in b/MANIFEST.in index 
354b92d735..28eaf30baa 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -14,6 +14,7 @@ exclude .gitignore exclude .mailmap exclude .pre-commit-config.yaml exclude .readthedocs.yml +exclude .ruff.toml exclude CHANGES exclude CODE_OF_CONDUCT.md exclude codecov.yml diff --git a/README.md b/README.md index 233c0edd39..493aa87681 100644 --- a/README.md +++ b/README.md @@ -9,15 +9,15 @@ analysing and visualising Earth science data -| | | -|------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| ⚙️ CI | [![ci-manifest](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml) [![ci-tests](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml) [![ci-wheels](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml) [![pre-commit](https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg)](https://results.pre-commit.ci/latest/github/SciTools/iris/main) | -| 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH 
Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) [![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | -| 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | -| 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | -| ✨ Meta | [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/LICENSE) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | -| 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | -| 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) 
[![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) [![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | +| | | +|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| ⚙️ CI | [![ci-manifest](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml) [![ci-tests](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml) [![ci-wheels](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml) [![pre-commit](https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg)](https://results.pre-commit.ci/latest/github/SciTools/iris/main) | +| 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) 
[![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | +| 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | +| 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | +| ✨ Meta | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/LICENSE) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | +| 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | +| 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) 
[![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) [![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | | |

diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 884bfd376a..05bb9ef7b5 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -166,6 +166,8 @@ This document explains the changes made to Iris for this release :doc:`../developers_guide/release_do_nothing`. (:pull:`5515`) +#. `@bjlittle`_ adopted and configured the `ruff`_ linter. (:pull:`5623`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, @@ -181,4 +183,5 @@ This document explains the changes made to Iris for this release .. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule .. _codespell: https://github.com/codespell-project/codespell +.. _ruff: https://github.com/astral-sh/ruff .. _split attributes project: https://github.com/orgs/SciTools/projects/5?pane=info diff --git a/pyproject.toml b/pyproject.toml index 88b39f1601..5d6e232d22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,37 @@ Discussions = "https://github.com/SciTools/iris/discussions" Documentation = "https://scitools-iris.readthedocs.io/en/stable/" Issues = "https://github.com/SciTools/iris/issues" +[tool.ruff] +# Exclude the following, in addition to the standard set of exclusions. +# https://docs.astral.sh/ruff/settings/#exclude +extend-exclude = [ + "_ff_cross_references.py", + "um_cf_map.py", + "docs/src/sphinxext", + "tools", +] +line-length = 79 +src = [ + "benchmarks", + "lib", + "docs/src", +] +target-version = "py39" + +[tool.ruff.lint] +ignore = [ + # NOTE: Non-permanent exclusions should be added to the ".ruff.toml" file. + + # flake8-implicit-str-concat (ISC) + # https://docs.astral.sh/ruff/rules/single-line-implicit-string-concatenation/ + # NOTE: This rule may cause conflicts when used with "ruff format". + "ISC001", # Implicitly concatenate string literals on one line. 
+ ] + preview = false + select = [ + "ALL", + ] + [tool.setuptools] license-files = ["LICENSE"] zip-safe = false From ba57f289e621f1c7d2b9bd8ba5d075aaf1f38921 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 12 Dec 2023 14:50:18 +0000 Subject: [PATCH 105/134] add ignore for twitter (#5631) --- docs/src/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/conf.py b/docs/src/conf.py index ea12b83aaf..e0d8148a54 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -396,6 +396,7 @@ def _dotv(version): "https://www.metoffice.gov.uk/", "https://biggus.readthedocs.io/", "https://stickler-ci.com/", + "https://twitter.com/scitools_iris", ] # list of sources to exclude from the build. From a1e3921f3e2396646dae508d040e9922aee8d141 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:03:35 +0000 Subject: [PATCH 106/134] ensured ruff convention is numpy (#5629) --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 5d6e232d22..805e5bb47c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,6 +89,9 @@ ignore = [ "ALL", ] +[tool.ruff.lint.pydocstyle] +convention = "numpy" + [tool.setuptools] license-files = ["LICENSE"] zip-safe = false From b80ff4dc59f3ef1ca7f399019a2c70f0e1bc8045 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:08:28 +0000 Subject: [PATCH 107/134] [CI Bot] environment lockfiles auto-update (#5619) * Updated environment lockfiles * Use https for opendap.org link, as http no longer supported. (#5628) * Use https for opendap.org link, as http no longer supported. * Exclude twitter/scitools_iris from linkcheck, now failing for some unknown reason. 
--------- Co-authored-by: Lockfile bot Co-authored-by: Patrick Peglar --- requirements/locks/py310-linux-64.lock | 34 +++++++++++++------------- requirements/locks/py311-linux-64.lock | 34 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 34 +++++++++++++------------- 3 files changed, 51 insertions(+), 51 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index f02da60d5c..53ffa7f535 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -81,7 +81,7 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda#f3a8c32aa764c3e7188b4b810fc9d6ce @@ -108,7 +108,7 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -116,9 +116,9 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda#7144d5a828e2cae218e0e3c98d8a0aeb 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c @@ -126,6 +126,7 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py310hd41b1 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda#cb25177acf28cc35cfa6c1ac1c679e22 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff @@ -143,7 +144,7 @@ https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f 
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -162,9 +163,9 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f9 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py310h2372a71_0.conda#c2dcff257e040bcda00e2a30a9d85333 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.9.0-pyha770c72_0.conda#9677d53e8eb8e3282e9d84c5d0c525d7 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py310h2372a71_0.conda#3c0109417cbcdabfed289360886b036d +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 @@ -173,38 +174,37 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2 
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py310h01dd4db_0.conda#95d87a906d88b5824d7d36eeef091dba https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.9.0-hd8ed1ab_0.conda#a0c28e5b7f824a19bd8ee9255c9bd58c +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda#d3147cfbf72d6ae7bba10562208f6def https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f 
-https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_4.conda#124211262afed349430d9a3de6b51e8f +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310hd5c30f3_5.conda#dc2ee770a2299307f3c127af79160d25 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py310h2372a71_1.conda#dfcf64f67961eb9686676f96fdb4b4d1 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py310hcc13569_0.conda#30a39c1064e5efc578d83c2a5f7cd749 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py310hcc13569_0.conda#410f7e83992a591e492c25049a859254 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py310hb13e2d6_0.conda#f0063b2885bfae11324a00a693f88781 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310hc3e127f_1.conda#fdaca8d27b3af78d617521eb37b1d055 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_4.conda#0ca55ca20891d393846695354b32ebc5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 142d50df86..3c655e2192 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -81,7 +81,7 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb 
https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 @@ -108,7 +108,7 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -116,9 +116,9 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda#7144d5a828e2cae218e0e3c98d8a0aeb https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 @@ -126,6 +126,7 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py311h9547e https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff @@ -143,7 +144,7 @@ https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -161,9 +162,9 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f9 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py311h459d7ec_0.conda#5b24692ece82f89e5cb9a469d9619731 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.9.0-pyha770c72_0.conda#9677d53e8eb8e3282e9d84c5d0c525d7 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py311h459d7ec_0.conda#a14114f70e23f7fd5ab9941fec45b095 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 @@ -172,38 +173,37 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py311ha6c5da5_0.conda#83a988daf5c49e57f7d2086fb6781fe8 https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a 
+https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.9.0-hd8ed1ab_0.conda#a0c28e5b7f824a19bd8ee9255c9bd58c +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_4.conda#75d504c6787edc377ebdba087a26a61b +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311hca0b8b9_5.conda#cac429fcb9126d5e6f02c8ba61c2a811 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 
-https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py311h459d7ec_1.conda#45b8d355bbcdd27588c2d266bcfdff84 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py311h320fe9a_0.conda#3ea3486e16d559dfcb539070ed330a1e +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py311h320fe9a_0.conda#e44ccb61b6621bf3f8053ae66eba7397 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py311h64a7726_0.conda#9ac5334f1b5ed072d3dbc342503d7868 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311h2032efe_1.conda#4ba860ff851768615b1a25b788022750 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 
https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index c43d2d1c61..0b98e25ab8 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -81,7 +81,7 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#00 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 @@ -108,7 +108,7 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -116,9 +116,9 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda#7144d5a828e2cae218e0e3c98d8a0aeb 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d @@ -126,6 +126,7 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py39h7633fe https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda#c2e412b0f11e5983bcfc35d9beb91ecb https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff @@ -143,7 +144,7 @@ https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 @@ -160,9 +161,9 @@ https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py39hd1e30aa_0.conda#616bc0b442acefebdbe97c7b885d771e -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.9.0-pyha770c72_0.conda#9677d53e8eb8e3282e9d84c5d0c525d7 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py39hd1e30aa_0.conda#9b58e5973dd3d786253f4ca9534b1aba +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 @@ -172,38 +173,37 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2 
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda#eeaa413fddccecb2ab7f747bdb55b07f https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece +https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.9.0-hd8ed1ab_0.conda#a0c28e5b7f824a19bd8ee9255c9bd58c +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py39h474f0d3_0.conda#459a58eda3e74dd5e3d596c618e7f20a 
https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_4.conda#4b6e79000ec3a495f429b2c1092ed63b +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39h15b0fa6_5.conda#85e186c7ff673b0d0026782ec353fb2a https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py39hd1e30aa_1.conda#ca63612907462c8e36edcc9bbacc253e https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py39hddac248_0.conda#961b398d8c421a3752e26f01f2dcbdac 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py39hddac248_0.conda#dcfd2f15c6f8f0bbf234412b18a2a5d0 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py39h474f0d3_0.conda#4b401c1516417b4b14aa1249d2f7929d https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h6404dd3_1.conda#05623249055d99c51cde021b525611db https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_4.conda#81310d21bf9d91754c1220c585bb72d6 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a From cad46ef62c011eeb9c678379845df21e7a4377fb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:28:24 +0000 Subject: [PATCH 108/134] [pre-commit.ci] pre-commit autoupdate (#5624) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.7) - [github.com/pycqa/isort: 5.12.0 → 5.13.0](https://github.com/pycqa/isort/compare/5.12.0...5.13.0) 
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Patrick Peglar --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cd52482df0..cc6579a536 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.1.6" + rev: "v0.1.7" hooks: - id: ruff types: [file, python] @@ -56,7 +56,7 @@ repos: types: [file, python] - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.0 hooks: - id: isort types: [file, python] From 2da5de9657213003218ffde39b2bbe7eedac984a Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Tue, 12 Dec 2023 16:39:06 +0100 Subject: [PATCH 109/134] Add whatsnew entries (#5626) --- docs/src/whatsnew/latest.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 05bb9ef7b5..57fe376947 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -92,6 +92,9 @@ This document explains the changes made to Iris for this release #. `@stephenworsley`_ improved the speed of :class:`~iris.analysis.AreaWeighted` regridding. (:pull:`5543`) +#. `@bouweandela`_ made :func:`iris.util.array_equal` faster when comparing + lazy data from file. This will also speed up coordinate comparison. + (:pull:`5610`) 🔥 Deprecations =============== @@ -129,6 +132,7 @@ This document explains the changes made to Iris for this release saving and loading, :ref:`netcdf_io` with a section on chunking, and placeholders for further topics. (:pull:`5588`) +#. `@bouweandela`_ updated all hyperlinks to https. 
(:pull:`5621`) 💼 Internal =========== From dcff29dd9d8e1842b9987301f28462181d2892e0 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:50:17 +0000 Subject: [PATCH 110/134] ruff checks for numpydocs (#5630) * wip * ruff compliant. * enabled ignore --- .ruff.toml | 8 ++ lib/iris/config.py | 18 ++--- lib/iris/fileformats/netcdf/_dask_locks.py | 9 +-- lib/iris/fileformats/netcdf/loader.py | 33 +++----- lib/iris/fileformats/netcdf/saver.py | 94 +++++++--------------- lib/iris/io/__init__.py | 27 +++---- 6 files changed, 70 insertions(+), 119 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index f3f9f1d3f0..0702e77757 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -28,6 +28,14 @@ lint.ignore = [ # pydocstyle (D) # https://docs.astral.sh/ruff/rules/#pydocstyle-d "D", + # Permanent + "D105", # Missing docstring in magic method + + # Temporary, to be removed when we are more compliant + "D417", # Missing argument descriptions in the docstring + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D106", # Missing docstring in public nested class # pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up diff --git a/lib/iris/config.py b/lib/iris/config.py index 22fb93a06a..25aeffdb33 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides access to Iris-specific configuration values. +"""Provides access to Iris-specific configuration values. The default configuration values can be overridden by creating the file ``iris/etc/site.cfg``. If it exists, this file must conform to the format @@ -42,8 +41,7 @@ def get_logger( name, datefmt=None, fmt=None, level=None, propagate=None, handler=True ): - """ - Create a custom class for logging. + """Create a custom class for logging. 
Create a :class:`logging.Logger` with a :class:`logging.StreamHandler` and custom :class:`logging.Formatter`. @@ -114,8 +112,7 @@ def get_logger( # Returns simple string options def get_option(section, option, default=None): - """ - Return the option value for the given section. + """Return the option value for the given section. Returns the option value for the given section, or the default value if the section/option is not present. @@ -129,8 +126,7 @@ def get_option(section, option, default=None): # Returns directory path options def get_dir_option(section, option, default=None): - """ - Return the directory path from the given option and section. + """Return the directory path from the given option and section. Returns the directory path from the given option and section, or returns the given default value if the section/option is not present @@ -194,8 +190,7 @@ class NetCDF: """Control Iris NetCDF options.""" def __init__(self, conventions_override=None): - """ - Set up NetCDF processing options for Iris. + """Set up NetCDF processing options for Iris. Parameters ---------- @@ -274,8 +269,7 @@ def _defaults_dict(self): @contextlib.contextmanager def context(self, **kwargs): - """ - Allow temporary modification of the options via a context manager. + """Allow temporary modification of the options via a context manager. Accepted kwargs are the same as can be supplied to the Option. diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py index 82edbf202e..eb60afcf8a 100644 --- a/lib/iris/fileformats/netcdf/_dask_locks.py +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Module containing code to create locks enabling dask workers to co-operate. +"""Module containing code to create locks enabling dask workers to co-operate. 
This matter is complicated by needing different solutions for different dask scheduler types, i.e. local 'threads' scheduler, local 'processes' or @@ -81,8 +80,7 @@ def dask_scheduler_is_distributed(): def get_dask_array_scheduler_type(): - """ - Work out what type of scheduler an array.compute*() will use. + """Work out what type of scheduler an array.compute*() will use. Returns one of 'distributed', 'threads' or 'processes'. The return value is a valid argument for dask.config.set(scheduler=). @@ -117,8 +115,7 @@ def get_dask_array_scheduler_type(): def get_worker_lock(identity: str): - """ - Return a mutex Lock which can be shared by multiple Dask workers. + """Return a mutex Lock which can be shared by multiple Dask workers. The type of Lock generated depends on the dask scheduler type, which must therefore be set up before this is called. diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 84e04c1589..1488c0afd3 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. +"""Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. @@ -159,8 +158,7 @@ def _set_attributes(attributes, key, value): def _add_unused_attributes(iris_object, cf_var): - """ - Populate the attributes of a cf element with the "unused" attributes. + """Populate the attributes of a cf element with the "unused" attributes. Populate the attributes of a cf element with the "unused" attributes from the associated CF-netCDF variable. 
That is, all those that aren't CF @@ -200,8 +198,7 @@ def _get_actual_dtype(cf_var): def _get_cf_var_data(cf_var, filename): - """ - Get an array representing the data of a CF variable. + """Get an array representing the data of a CF variable. This is typically a lazy array based around a NetCDFDataProxy, but if the variable is "sufficiently small", we instead fetch the data as a real (numpy) array. @@ -292,8 +289,8 @@ def _get_cf_var_data(cf_var, filename): class _OrderedAddableList(list): - """ - A custom container object for actions recording. + """A custom container object for actions recording. + Used purely in actions debugging, to accumulate a record of which actions were activated. @@ -521,8 +518,7 @@ def coord_from_term(term): def _translate_constraints_to_var_callback(constraints): - """ - Translate load constraints into a simple data-var filter function, if possible. + """Translate load constraints into a simple data-var filter function, if possible. Returns ------- @@ -566,8 +562,7 @@ def inner(cf_datavar): def load_cubes(file_sources, callback=None, constraints=None): - """ - Load cubes from a list of NetCDF filenames/OPeNDAP URLs. + """Load cubes from a list of NetCDF filenames/OPeNDAP URLs. Parameters ---------- @@ -578,6 +573,8 @@ def load_cubes(file_sources, callback=None, constraints=None): callback : function, optional Function which can be passed on to :func:`iris.io.run_callback`. + constraints : optional + Returns ------- Generator of loaded NetCDF :class:`iris.cube.Cube`. @@ -678,8 +675,7 @@ class Modes(Enum): AS_DASK = auto() def __init__(self, var_dim_chunksizes=None): - """ - Provide user control of Dask chunking. + """Provide user control of Dask chunking. The NetCDF loader is controlled by the single instance of this: the :data:`~iris.fileformats.netcdf.loader.CHUNK_CONTROL` object. 
@@ -709,8 +705,7 @@ def set( var_names: Union[str, Iterable[str]] = None, **dimension_chunksizes: Mapping[str, int], ) -> None: - """ - Control the Dask chunk sizes applied to NetCDF variables during loading. + r"""Control the Dask chunk sizes applied to NetCDF variables during loading. Parameters ---------- @@ -784,8 +779,7 @@ def set( @contextmanager def from_file(self) -> None: - """ - Ensures the chunk sizes are loaded in from NetCDF file variables. + r"""Ensure the chunk sizes are loaded in from NetCDF file variables. Raises ------ @@ -808,8 +802,7 @@ def from_file(self) -> None: @contextmanager def as_dask(self) -> None: - """ - Relies on Dask :external+dask:doc:`array` to control chunk sizes. + """Relies on Dask :external+dask:doc:`array` to control chunk sizes. Notes ----- diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 5bfc8754fb..3c154b8511 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Module to support the saving of Iris cubes to a NetCDF file. +"""Module to support the saving of Iris cubes to a NetCDF file. Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. @@ -186,7 +185,6 @@ def append(self, name, coord): ---------- name: CF name of the associated coordinate. - coord: The coordinate of the associated CF name. @@ -248,8 +246,7 @@ def coord(self, name): def _bytes_if_ascii(string): - """ - Convert string to a byte string (str in py2k, bytes in py3k). + """Convert string to a byte string (str in py2k, bytes in py3k). 
Convert the given string to a byte string (str in py2k, bytes in py3k) if the given string can be encoded to ascii, else maintain the type @@ -268,8 +265,7 @@ def _bytes_if_ascii(string): def _setncattr(variable, name, attribute): - """ - Put the given attribute on the given netCDF4 Data type. + """Put the given attribute on the given netCDF4 Data type. Put the given attribute on the given netCDF4 Data type, casting attributes as we go to bytes rather than unicode. @@ -293,8 +289,7 @@ def _setncattr(variable, name, attribute): def _data_fillvalue_check(arraylib, data, check_value): - """ - Check whether an array is masked, and whether it contains a fill-value. + """Check whether an array is masked, and whether it contains a fill-value. Parameters ---------- @@ -331,8 +326,7 @@ class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): - """ - Work out whether there was a possible or actual fill-value collision. + """Work out whether there was a possible or actual fill-value collision. From the given information, work out whether there was a possible or actual fill-value collision, and if so construct a warning. @@ -390,8 +384,7 @@ class Saver: """A manager for saving netcdf files.""" def __init__(self, filename, netcdf_format, compute=True): - """ - Manage saving netcdf files. + """Manage saving netcdf files. Parameters ---------- @@ -549,8 +542,7 @@ def write( packing=None, fill_value=None, ): - """ - Wrap for saving cubes to a NetCDF file. + """Wrap for saving cubes to a NetCDF file. Parameters ---------- @@ -854,8 +846,7 @@ def _create_cf_dimensions( self._dataset.createDimension(dim_name, size) def _add_mesh(self, cube_or_mesh): - """ - Add the cube's mesh, and all related variables to the dataset. + """Add the cube's mesh, and all related variables to the dataset. Add the cube's mesh, and all related variables to the dataset. 
Includes all the mesh-element coordinate and connectivity variables. @@ -991,8 +982,7 @@ def _add_mesh(self, cube_or_mesh): def _add_inner_related_vars( self, cube, cf_var_cube, dimension_names, coordlike_elements ): - """ - Create a set of variables for aux-coords, ancillaries or cell-measures. + """Create a set of variables for aux-coords, ancillaries or cell-measures. Create a set of variables for aux-coords, ancillaries or cell-measures, and attach them to the parent data variable. @@ -1037,8 +1027,7 @@ def _add_inner_related_vars( _setncattr(cf_var_cube, role_attribute_name, variable_names) def _add_aux_coords(self, cube, cf_var_cube, dimension_names): - """ - Add aux. coordinate to the dataset and associate with the data variable. + """Add aux. coordinate to the dataset and associate with the data variable. Parameters ---------- @@ -1078,8 +1067,7 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names): ) def _add_cell_measures(self, cube, cf_var_cube, dimension_names): - """ - Add cell measures to the dataset and associate with the data variable. + """Add cell measures to the dataset and associate with the data variable. Parameters ---------- @@ -1098,8 +1086,7 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names): ) def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names): - """ - Add ancillary variables measures to the dataset and associate with the data variable. + """Add ancillary variables measures to the dataset and associate with the data variable. Parameters ---------- @@ -1118,8 +1105,7 @@ def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names): ) def _add_dim_coords(self, cube, dimension_names): - """ - Add coordinate variables to NetCDF dataset. + """Add coordinate variables to NetCDF dataset. 
Parameters ---------- @@ -1139,8 +1125,7 @@ def _add_dim_coords(self, cube, dimension_names): self._name_coord_map.append(cf_name, coord) def _add_aux_factories(self, cube, cf_var_cube, dimension_names): - """ - Represent the presence of dimensionless vertical coordinates. + """Represent the presence of dimensionless vertical coordinates. Modify the variables of the NetCDF dataset to represent the presence of dimensionless vertical coordinates based on @@ -1236,8 +1221,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): _setncattr(cf_var, "formula_terms", formula_terms) def _get_dim_names(self, cube_or_mesh): - """ - Determine suitable CF-netCDF data dimension names. + """Determine suitable CF-netCDF data dimension names. Parameters ---------- @@ -1262,8 +1246,7 @@ def _get_dim_names(self, cube_or_mesh): def record_dimension( names_list, dim_name, length, matching_coords=None ): - """ - Record a file dimension, its length and associated "coordinates". + """Record a file dimension, its length and associated "coordinates". Record a file dimension, its length and associated "coordinates" (which may in fact also be connectivities). @@ -1485,8 +1468,7 @@ def cf_valid_var_name(var_name): @staticmethod def _cf_coord_standardised_units(coord): - """ - Determine a suitable units from a given coordinate. + """Determine a suitable units from a given coordinate. Parameters ---------- @@ -1547,8 +1529,7 @@ def _ensure_valid_dtype(self, values, src_name, src_object): return values def _create_cf_bounds(self, coord, cf_var, cf_name): - """ - Create the associated CF-netCDF bounds variable. + """Create the associated CF-netCDF bounds variable. Parameters ---------- @@ -1608,8 +1589,7 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): ) def _get_cube_variable_name(self, cube): - """ - Return a CF-netCDF variable name for the given cube. + """Return a CF-netCDF variable name for the given cube. 
Parameters ---------- @@ -1633,8 +1613,7 @@ def _get_cube_variable_name(self, cube): return cf_name def _get_coord_variable_name(self, cube_or_mesh, coord): - """ - Return a CF-netCDF variable name for a given coordinate-like element. + """Return a CF-netCDF variable name for a given coordinate-like element. Parameters ---------- @@ -1696,8 +1675,7 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): return cf_name def _get_mesh_variable_name(self, mesh): - """ - Return a CF-netCDF variable name for the mesh. + """Return a CF-netCDF variable name for the mesh. Parameters ---------- @@ -1723,8 +1701,7 @@ def _get_mesh_variable_name(self, mesh): return cf_name def _create_mesh(self, mesh): - """ - Create a mesh variable in the netCDF dataset. + """Create a mesh variable in the netCDF dataset. Parameters ---------- @@ -1807,8 +1784,7 @@ def _create_generic_cf_array_var( element_dims=None, fill_value=None, ): - """ - Create theCF-netCDF variable given dimensional_metadata. + """Create theCF-netCDF variable given dimensional_metadata. Create the associated CF-netCDF variable in the netCDF dataset for the given dimensional_metadata. @@ -1955,8 +1931,7 @@ def _create_generic_cf_array_var( return cf_name def _create_cf_cell_methods(self, cube, dimension_names): - """ - Create CF-netCDF string representation of a cube cell methods. + """Create CF-netCDF string representation of a cube cell methods. Parameters ---------- @@ -2007,8 +1982,7 @@ def _create_cf_cell_methods(self, cube, dimension_names): return " ".join(cell_methods) def _create_cf_grid_mapping(self, cube, cf_var_cube): - """ - Create CF-netCDF grid mapping and associated CF-netCDF variable. + """Create CF-netCDF grid mapping and associated CF-netCDF variable. Create CF-netCDF grid mapping variable and associated CF-netCDF data variable grid mapping attribute. 
@@ -2278,8 +2252,7 @@ def _create_cf_data_variable( fill_value=None, **kwargs, ): - """ - Create CF-netCDF data variable for the cube and any associated grid mapping. + """Create CF-netCDF data variable for the cube and any associated grid mapping. # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can # be removed. @@ -2352,8 +2325,7 @@ def _create_cf_data_variable( dtype = data.dtype.newbyteorder("=") def set_packing_ncattrs(cfvar): - """ - Set netCDF packing attributes. + """Set netCDF packing attributes. NOTE: cfvar needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. @@ -2447,8 +2419,7 @@ def set_packing_ncattrs(cfvar): return cf_var def _increment_name(self, varname): - """ - Increment string name or begin increment. + """Increment string name or begin increment. Avoidance of conflicts between variable names, where the name is incremented to distinguish it from others. @@ -2566,8 +2537,7 @@ def store(data, cf_var, fill_info): ) def delayed_completion(self) -> Delayed: - """ - Perform file completion for delayed saves. + """Perform file completion for delayed saves. Create and return a :class:`dask.delayed.Delayed` to perform file completion for delayed saves. @@ -2638,8 +2608,7 @@ def no_op(): return result def complete(self, issue_warnings=True) -> List[Warning]: - """ - Complete file by computing any delayed variable saves. + """Complete file by computing any delayed variable saves. This requires that the Saver has closed the dataset (exited its context). @@ -2692,8 +2661,7 @@ def save( fill_value=None, compute=True, ): - r""" - Save cube(s) to a netCDF file, given the cube and the filename. + r"""Save cube(s) to a netCDF file, given the cube and the filename. * Iris will write CF 1.7 compliant NetCDF files. * **If split-attribute saving is disabled**, i.e. 
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 08586c81b7..6dde73fb68 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -36,8 +36,7 @@ def __setitem__(self, key, value): def run_callback(callback, cube, field, filename): - """ - Run the callback mechanism given the appropriate arguments. + """Run the callback mechanism given the appropriate arguments. Parameters ---------- @@ -83,8 +82,7 @@ def run_callback(callback, cube, field, filename): def decode_uri(uri, default="file"): - r""" - Decode a single URI into scheme and scheme-specific parts. + r"""Decode a single URI into scheme and scheme-specific parts. In addition to well-formed URIs, it also supports bare file paths as strings or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are @@ -146,8 +144,7 @@ def decode_uri(uri, default="file"): def expand_filespecs(file_specs, files_expected=True): - """ - Find all matching file paths from a list of file-specs. + """Find all matching file paths from a list of file-specs. Parameters ---------- @@ -201,8 +198,7 @@ def expand_filespecs(file_specs, files_expected=True): def load_files(filenames, callback, constraints=None): - """ - Create a generator of Cubes from given files. + """Create a generator of Cubes from given files. Take a list of filenames which may also be globs, and optionally a constraint set and a callback function, and returns a @@ -241,8 +237,7 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): - """ - Create generator of Cubes from the given OPeNDAP URLs. + """Create generator of Cubes from the given OPeNDAP URLs. Take a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. @@ -276,8 +271,7 @@ def load_http(urls, callback): def load_data_objects(urls, callback): - """ - Take a list of data-source objects and a callback function, returns a generator of Cubes. 
+ """Take a list of data-source objects and a callback function, returns a generator of Cubes. The 'objects' take the place of 'uris' in the load calls. The appropriate types of the data-source objects are expected to be @@ -343,8 +337,7 @@ def _check_init_savers(): def add_saver(file_extension, new_saver): - """ - Add a custom saver to the Iris session. + """Add a custom saver to the Iris session. Parameters ---------- @@ -370,8 +363,7 @@ def add_saver(file_extension, new_saver): def find_saver(filespec): - """ - Find the saver function appropriate to the given filename or extension. + """Find the saver function appropriate to the given filename or extension. Parameters ---------- @@ -401,8 +393,7 @@ def find_saver(filespec): def save(source, target, saver=None, **kwargs): - """ - Save one or more Cubes to file (or other writeable). + """Save one or more Cubes to file (or other writeable). Iris currently supports three file formats for saving, which it can recognise by filename extension: From 0cdead673dacd59538b68616867d42703e3a4892 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 13 Dec 2023 14:07:08 +0000 Subject: [PATCH 111/134] Line length 88 (#5632) * black line-length=88 * isort line-length=88 * fix doctest warning line numbers * add whatsnew entry --- benchmarks/asv_delegated_conda.py | 20 +- benchmarks/benchmarks/cperf/save.py | 5 +- benchmarks/benchmarks/cube.py | 17 +- .../benchmarks/experimental/ugrid/__init__.py | 4 +- .../experimental/ugrid/regions_combine.py | 11 +- .../benchmarks/generate_data/__init__.py | 8 +- benchmarks/benchmarks/generate_data/stock.py | 16 +- benchmarks/benchmarks/generate_data/ugrid.py | 14 +- .../benchmarks/generate_data/um_files.py | 8 +- benchmarks/benchmarks/load/__init__.py | 12 +- benchmarks/benchmarks/load/ugrid.py | 4 +- benchmarks/benchmarks/regridding.py | 8 +- .../benchmarks/sperf/combine_regions.py | 19 +- benchmarks/benchmarks/trajectory.py | 8 +- benchmarks/bm_runner.py | 47 +- 
.../general/plot_SOI_filtering.py | 8 +- .../general/plot_cross_section.py | 8 +- .../general/plot_custom_file_loading.py | 12 +- .../general/plot_polynomial_fit.py | 4 +- .../plot_projections_and_annotations.py | 16 +- docs/gallery_code/meteorology/plot_COP_1d.py | 4 +- .../gallery_code/meteorology/plot_COP_maps.py | 8 +- .../meteorology/plot_deriving_phenomena.py | 4 +- .../meteorology/plot_lagged_ensemble.py | 8 +- .../meteorology/plot_wind_barbs.py | 4 +- .../meteorology/plot_wind_speed.py | 4 +- .../oceanography/plot_load_nemo.py | 6 +- .../oceanography/plot_orca_projection.py | 4 +- .../src/further_topics/filtering_warnings.rst | 12 +- .../src/userguide/plotting_examples/brewer.py | 4 +- .../regridding_plots/regridded_to_global.py | 2 +- .../regridded_to_global_area_weighted.py | 4 +- .../regridding_plots/regridded_to_rotated.py | 2 +- .../regridding_plots/regridding_plot.py | 4 +- docs/src/whatsnew/latest.rst | 3 + lib/iris/__init__.py | 11 +- lib/iris/_concatenate.py | 96 ++-- lib/iris/_constraints.py | 19 +- lib/iris/_data_manager.py | 8 +- lib/iris/_lazy_data.py | 4 +- lib/iris/_merge.py | 109 ++--- lib/iris/_representation/cube_printout.py | 14 +- lib/iris/_representation/cube_summary.py | 48 +- lib/iris/analysis/__init__.py | 110 ++--- lib/iris/analysis/_area_weighted.py | 35 +- lib/iris/analysis/_grid_angles.py | 17 +- lib/iris/analysis/_interpolation.py | 46 +- lib/iris/analysis/_regrid.py | 64 +-- lib/iris/analysis/_scipy_interpolate.py | 28 +- lib/iris/analysis/calculus.py | 35 +- lib/iris/analysis/cartography.py | 81 ++-- lib/iris/analysis/geometry.py | 8 +- lib/iris/analysis/maths.py | 45 +- lib/iris/analysis/stats.py | 28 +- lib/iris/analysis/trajectory.py | 53 +-- lib/iris/aux_factory.py | 258 +++-------- lib/iris/common/lenient.py | 4 +- lib/iris/common/metadata.py | 86 +--- lib/iris/common/mixin.py | 16 +- lib/iris/common/resolve.py | 113 ++--- lib/iris/config.py | 4 +- lib/iris/coord_categorisation.py | 24 +- lib/iris/coord_systems.py | 75 
+--- lib/iris/coords.py | 97 +--- lib/iris/cube.py | 305 ++++--------- lib/iris/exceptions.py | 3 +- lib/iris/experimental/raster.py | 27 +- lib/iris/experimental/regrid.py | 28 +- lib/iris/experimental/regrid_conservative.py | 4 +- lib/iris/experimental/representation.py | 43 +- lib/iris/experimental/stratify.py | 5 +- lib/iris/experimental/ugrid/cf.py | 24 +- lib/iris/experimental/ugrid/load.py | 47 +- lib/iris/experimental/ugrid/mesh.py | 163 ++----- lib/iris/experimental/ugrid/metadata.py | 10 +- lib/iris/experimental/ugrid/save.py | 8 +- lib/iris/experimental/ugrid/utils.py | 12 +- lib/iris/fileformats/__init__.py | 18 +- lib/iris/fileformats/_ff.py | 12 +- .../fileformats/_nc_load_rules/actions.py | 4 +- lib/iris/fileformats/_nc_load_rules/engine.py | 4 +- .../fileformats/_nc_load_rules/helpers.py | 131 ++---- lib/iris/fileformats/_pp_lbproc_pairs.py | 5 +- .../_structured_array_identification.py | 26 +- lib/iris/fileformats/abf.py | 5 +- lib/iris/fileformats/cf.py | 91 ++-- lib/iris/fileformats/dot.py | 31 +- lib/iris/fileformats/name.py | 4 +- lib/iris/fileformats/name_loaders.py | 94 +--- lib/iris/fileformats/netcdf/__init__.py | 5 +- .../fileformats/netcdf/_thread_safe_nc.py | 29 +- lib/iris/fileformats/netcdf/loader.py | 20 +- lib/iris/fileformats/netcdf/saver.py | 169 ++----- lib/iris/fileformats/nimrod.py | 8 +- lib/iris/fileformats/nimrod_load_rules.py | 67 +-- lib/iris/fileformats/pp.py | 131 ++---- lib/iris/fileformats/pp_load_rules.py | 123 +----- lib/iris/fileformats/pp_save_rules.py | 41 +- lib/iris/fileformats/rules.py | 29 +- lib/iris/fileformats/um/_fast_load.py | 16 +- .../um/_fast_load_structured_fields.py | 17 +- .../um/_optimal_array_structuring.py | 8 +- lib/iris/io/__init__.py | 39 +- lib/iris/io/format_picker.py | 3 +- lib/iris/iterate.py | 17 +- lib/iris/palette.py | 14 +- lib/iris/pandas.py | 31 +- lib/iris/plot.py | 102 ++--- lib/iris/quickplot.py | 13 +- lib/iris/symbols.py | 8 +- lib/iris/tests/__init__.py | 62 +-- 
..._area_weighted_rectilinear_src_and_grid.py | 24 +- .../test_regrid_conservative_via_esmpy.py | 20 +- lib/iris/tests/experimental/test_raster.py | 4 +- lib/iris/tests/graphics/__init__.py | 8 +- lib/iris/tests/graphics/idiff.py | 12 +- lib/iris/tests/graphics/recreate_imagerepo.py | 14 +- .../analysis/test_area_weighted.py | 4 +- .../aux_factory/test_OceanSigmaZFactory.py | 12 +- .../concatenate/test_concatenate.py | 60 +-- .../test_regrid_ProjectedUnstructured.py | 24 +- .../experimental/test_ugrid_load.py | 18 +- .../experimental/test_ugrid_save.py | 16 +- .../integration/fast_load/test_fast_load.py | 31 +- .../integration/netcdf/test__dask_locks.py | 5 +- .../integration/netcdf/test_attributes.py | 4 +- .../integration/netcdf/test_aux_factories.py | 16 +- .../integration/netcdf/test_delayed_save.py | 40 +- .../tests/integration/netcdf/test_general.py | 20 +- .../netcdf/test_self_referencing.py | 8 +- .../tests/integration/plot/test_colorbar.py | 21 +- .../tests/integration/plot/test_nzdateline.py | 3 +- .../integration/plot/test_vector_plots.py | 20 +- lib/iris/tests/integration/test_cube.py | 8 +- .../integration/test_netcdf__loadsaveattrs.py | 125 ++---- lib/iris/tests/integration/test_pp.py | 62 +-- .../integration/test_regrid_equivalence.py | 4 +- lib/iris/tests/integration/test_regridding.py | 8 +- lib/iris/tests/integration/test_trajectory.py | 40 +- lib/iris/tests/pp.py | 8 +- lib/iris/tests/stock/__init__.py | 60 +-- lib/iris/tests/stock/_stock_2d_latlons.py | 10 +- lib/iris/tests/stock/mesh.py | 16 +- lib/iris/tests/stock/netcdf.py | 16 +- lib/iris/tests/system_test.py | 16 +- lib/iris/tests/test_aggregate_by.py | 188 ++------ lib/iris/tests/test_analysis.py | 261 +++-------- lib/iris/tests/test_analysis_calculus.py | 52 +-- lib/iris/tests/test_basic_maths.py | 52 +-- lib/iris/tests/test_cartography.py | 12 +- lib/iris/tests/test_cdm.py | 176 ++------ lib/iris/tests/test_cell.py | 12 +- lib/iris/tests/test_cf.py | 76 +--- 
lib/iris/tests/test_coding_standards.py | 28 +- lib/iris/tests/test_concatenate.py | 106 ++--- lib/iris/tests/test_constraints.py | 57 +-- lib/iris/tests/test_coord_api.py | 100 ++--- lib/iris/tests/test_coordsystem.py | 56 +-- lib/iris/tests/test_cube_to_pp.py | 44 +- lib/iris/tests/test_ff.py | 12 +- lib/iris/tests/test_file_save.py | 15 +- lib/iris/tests/test_hybrid.py | 20 +- lib/iris/tests/test_imports.py | 8 +- lib/iris/tests/test_intersect.py | 8 +- lib/iris/tests/test_io_init.py | 4 +- lib/iris/tests/test_iterate.py | 32 +- lib/iris/tests/test_lazy_aggregate_by.py | 12 +- lib/iris/tests/test_load.py | 16 +- lib/iris/tests/test_mapping.py | 12 +- lib/iris/tests/test_merge.py | 165 ++----- lib/iris/tests/test_name.py | 24 +- lib/iris/tests/test_netcdf.py | 154 ++----- lib/iris/tests/test_peak.py | 16 +- lib/iris/tests/test_pickling.py | 12 +- lib/iris/tests/test_plot.py | 47 +- lib/iris/tests/test_pp_cf.py | 18 +- lib/iris/tests/test_pp_module.py | 16 +- lib/iris/tests/test_pp_stash.py | 24 +- lib/iris/tests/test_pp_to_cube.py | 26 +- lib/iris/tests/test_quickplot.py | 32 +- lib/iris/tests/test_util.py | 52 +-- .../test_AreaWeightedRegridder.py | 20 +- .../cartography/test__quadrant_area.py | 13 +- .../analysis/cartography/test__xy_range.py | 4 +- .../analysis/cartography/test_area_weights.py | 8 +- .../cartography/test_gridcell_angles.py | 92 +--- .../unit/analysis/cartography/test_project.py | 16 +- .../cartography/test_rotate_grid_vectors.py | 16 +- .../analysis/cartography/test_rotate_winds.py | 61 +-- .../geometry/test_geometry_area_weights.py | 27 +- .../test_RectilinearInterpolator.py | 49 +- .../tests/unit/analysis/maths/__init__.py | 4 +- .../maths/test__inplace_common_checks.py | 40 +- .../unit/analysis/maths/test__output_dtype.py | 8 +- .../tests/unit/analysis/maths/test_add.py | 4 +- .../unit/analysis/maths/test_multiply.py | 4 +- .../unit/analysis/maths/test_subtract.py | 4 +- .../regrid/test_RectilinearRegridder.py | 31 +- 
.../regrid/test__CurvilinearRegridder.py | 44 +- .../test__RegularGridInterpolator.py | 4 +- .../unit/analysis/stats/test_pearsonr.py | 4 +- .../tests/unit/analysis/test_AreaWeighted.py | 4 +- lib/iris/tests/unit/analysis/test_COUNT.py | 8 +- lib/iris/tests/unit/analysis/test_Linear.py | 8 +- lib/iris/tests/unit/analysis/test_Nearest.py | 8 +- .../tests/unit/analysis/test_PERCENTILE.py | 16 +- .../tests/unit/analysis/test_PROPORTION.py | 4 +- .../analysis/test_PercentileAggregator.py | 20 +- .../tests/unit/analysis/test_PointInCell.py | 4 +- lib/iris/tests/unit/analysis/test_RMS.py | 4 +- lib/iris/tests/unit/analysis/test_STD_DEV.py | 4 +- lib/iris/tests/unit/analysis/test_SUM.py | 4 +- .../tests/unit/analysis/test_WPERCENTILE.py | 52 +-- .../test_WeightedPercentileAggregator.py | 30 +- ...t_UnstructuredNearestNeighbourRegridder.py | 20 +- ...est__nearest_neighbour_indices_ndcoords.py | 20 +- .../analysis/trajectory/test_interpolate.py | 44 +- .../test_AtmosphereSigmaFactory.py | 28 +- .../unit/aux_factory/test_AuxCoordFactory.py | 8 +- .../aux_factory/test_HybridPressureFactory.py | 20 +- .../unit/aux_factory/test_OceanSFactory.py | 4 +- .../unit/aux_factory/test_OceanSg1Factory.py | 8 +- .../unit/aux_factory/test_OceanSg2Factory.py | 8 +- .../aux_factory/test_OceanSigmaFactory.py | 4 +- .../aux_factory/test_OceanSigmaZFactory.py | 4 +- .../unit/common/lenient/test__Lenient.py | 40 +- .../common/lenient/test__lenient_client.py | 4 +- .../unit/common/lenient/test__qualname.py | 4 +- .../test_AncillaryVariableMetadata.py | 32 +- .../unit/common/metadata/test_BaseMetadata.py | 56 +-- .../metadata/test_CellMeasureMetadata.py | 64 +-- .../common/metadata/test_CoordMetadata.py | 144 ++---- .../unit/common/metadata/test_CubeMetadata.py | 93 +--- .../common/metadata/test__NamedTupleMeta.py | 5 +- .../common/metadata/test_metadata_filter.py | 14 +- .../metadata/test_metadata_manager_factory.py | 8 +- .../unit/common/mixin/test_CFVariableMixin.py | 64 +-- 
.../tests/unit/common/resolve/test_Resolve.py | 376 ++++------------ lib/iris/tests/unit/concatenate/__init__.py | 3 +- .../unit/concatenate/test__CoordMetaData.py | 15 +- .../unit/concatenate/test__CoordSignature.py | 8 +- .../unit/concatenate/test__CubeSignature.py | 8 +- .../unit/concatenate/test_concatenate.py | 16 +- .../unit/constraints/test_NameConstraint.py | 4 +- .../test_add_categorised_coord.py | 18 +- .../coord_categorisation/test_add_hour.py | 4 +- .../test_coord_categorisation.py | 12 +- .../unit/coord_systems/test_Geostationary.py | 8 +- .../tests/unit/coord_systems/test_Mercator.py | 16 +- .../coord_systems/test_ObliqueMercator.py | 16 +- .../coord_systems/test_PolarStereographic.py | 12 +- .../coord_systems/test_VerticalPerspective.py | 8 +- lib/iris/tests/unit/coords/__init__.py | 16 +- .../unit/coords/test_AncillaryVariable.py | 66 +-- lib/iris/tests/unit/coords/test_AuxCoord.py | 14 +- lib/iris/tests/unit/coords/test_Cell.py | 12 +- .../tests/unit/coords/test_CellMeasure.py | 5 +- lib/iris/tests/unit/coords/test_Coord.py | 134 ++---- lib/iris/tests/unit/coords/test_DimCoord.py | 12 +- .../unit/coords/test__DimensionalMetadata.py | 36 +- lib/iris/tests/unit/cube/test_Cube.py | 418 +++++------------- .../tests/unit/cube/test_CubeAttrsDict.py | 8 +- lib/iris/tests/unit/cube/test_CubeList.py | 91 +--- .../unit/cube/test_Cube__aggregated_by.py | 112 ++--- .../raster/test_export_geotiff.py | 4 +- .../representation/test_CubeRepresentation.py | 27 +- .../experimental/stratify/test_relevel.py | 16 +- ...test_CFUGridAuxiliaryCoordinateVariable.py | 53 +-- .../cf/test_CFUGridConnectivityVariable.py | 57 +-- .../ugrid/cf/test_CFUGridGroup.py | 24 +- .../ugrid/cf/test_CFUGridMeshVariable.py | 45 +- .../ugrid/cf/test_CFUGridReader.py | 12 +- .../experimental/ugrid/load/test_load_mesh.py | 4 +- .../ugrid/load/test_load_meshes.py | 26 +- .../ugrid/mesh/test_Connectivity.py | 44 +- .../unit/experimental/ugrid/mesh/test_Mesh.py | 72 +-- 
.../experimental/ugrid/mesh/test_MeshCoord.py | 52 +-- .../ugrid/mesh/test_Mesh__from_coords.py | 44 +- .../metadata/test_ConnectivityMetadata.py | 144 ++---- .../ugrid/metadata/test_MeshCoordMetadata.py | 144 ++---- .../ugrid/metadata/test_MeshMetadata.py | 144 ++---- .../ugrid/utils/test_recombine_submeshes.py | 24 +- lib/iris/tests/unit/fileformats/__init__.py | 4 +- .../tests/unit/fileformats/cf/test_CFGroup.py | 12 +- .../unit/fileformats/cf/test_CFReader.py | 44 +- .../unit/fileformats/dot/test__dot_path.py | 4 +- .../tests/unit/fileformats/ff/test_ENDGame.py | 4 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 36 +- .../unit/fileformats/ff/test_FFHeader.py | 8 +- ...test__build_lat_lon_for_NAME_timeseries.py | 5 +- .../name_loaders/test__generate_cubes.py | 12 +- .../actions/test__grid_mappings.py | 60 +-- .../actions/test__hybrid_formulae.py | 68 +-- .../actions/test__latlon_dimcoords.py | 32 +- .../actions/test__miscellaneous.py | 4 +- .../actions/test__time_coords.py | 12 +- .../nc_load_rules/engine/test_engine.py | 13 +- ...ild_albers_equal_area_coordinate_system.py | 4 +- .../test_build_auxiliary_coordinate.py | 12 +- .../test_build_dimension_coordinate.py | 48 +- ...t_build_geostationary_coordinate_system.py | 8 +- ..._azimuthal_equal_area_coordinate_system.py | 8 +- ...ild_lambert_conformal_coordinate_system.py | 4 +- .../test_build_mercator_coordinate_system.py | 20 +- ...uild_oblique_mercator_coordinate_system.py | 19 +- ...t_build_stereographic_coordinate_system.py | 4 +- ...d_transverse_mercator_coordinate_system.py | 12 +- .../test_has_supported_mercator_parameters.py | 7 +- ...upported_polar_stereographic_parameters.py | 39 +- .../helpers/test_parse_cell_methods.py | 12 +- .../netcdf/loader/test__chunk_control.py | 6 +- .../netcdf/loader/test__get_cf_var_data.py | 4 +- .../netcdf/loader/test__load_aux_factory.py | 18 +- ...__translate_constraints_to_var_callback.py | 8 +- .../netcdf/loader/test_load_cubes.py | 12 +- 
.../fileformats/netcdf/saver/test_Saver.py | 20 +- .../netcdf/saver/test_Saver__lazy.py | 8 +- .../saver/test_Saver__lazy_stream_data.py | 12 +- .../netcdf/saver/test_Saver__ugrid.py | 58 +-- .../saver/test__data_fillvalue_check.py | 4 +- .../netcdf/saver/test__fillvalue_report.py | 25 +- .../fileformats/netcdf/saver/test_save.py | 23 +- .../nimrod_load_rules/test_units.py | 36 +- .../nimrod_load_rules/test_vertical_coord.py | 12 +- .../tests/unit/fileformats/pp/test_PPField.py | 8 +- .../pp/test__convert_constraints.py | 4 +- .../pp/test__data_bytes_to_shaped_array.py | 20 +- .../fileformats/pp/test__interpret_field.py | 10 +- .../tests/unit/fileformats/pp/test_save.py | 40 +- .../pp_load_rules/test__all_other_rules.py | 4 +- ...__collapse_degenerate_points_and_bounds.py | 12 +- .../test__convert_time_coords.py | 61 +-- .../test__convert_vertical_coords.py | 24 +- .../pp_load_rules/test__epoch_date_hours.py | 20 +- .../fileformats/pp_load_rules/test_convert.py | 17 +- .../unit/fileformats/rules/test_Loader.py | 8 +- .../test_ArrayStructure.py | 16 +- .../test_GroupStructure.py | 19 +- lib/iris/tests/unit/fileformats/test_rules.py | 11 +- .../um/fast_load/test_FieldCollation.py | 16 +- .../um/fast_load/test__convert_collation.py | 20 +- .../test_BasicFieldCollation.py | 24 +- .../test_group_structured_fields.py | 20 +- .../test_optimal_array_structure.py | 24 +- .../unit/fileformats/um/test_um_to_pp.py | 8 +- .../tests/unit/io/test_expand_filespecs.py | 8 +- lib/iris/tests/unit/io/test_run_callback.py | 14 +- .../tests/unit/lazy_data/test_as_lazy_data.py | 12 +- .../lazy_data/test_map_complete_blocks.py | 20 +- lib/iris/tests/unit/merge/test_ProtoCube.py | 40 +- lib/iris/tests/unit/pandas/test_pandas.py | 295 ++++-------- lib/iris/tests/unit/plot/__init__.py | 4 +- lib/iris/tests/unit/plot/_blockplot_common.py | 4 +- .../test__check_bounds_contiguity_and_mask.py | 9 +- ..._check_geostationary_coords_and_convert.py | 3 +- .../tests/unit/plot/test__get_plot_defn.py | 
12 +- ...est__get_plot_defn_custom_coords_picked.py | 30 +- .../test__replace_axes_with_cartopy_axes.py | 4 +- lib/iris/tests/unit/plot/test_plot.py | 12 +- lib/iris/tests/unit/plot/test_scatter.py | 4 +- lib/iris/tests/unit/quickplot/test_pcolor.py | 4 +- .../tests/unit/quickplot/test_pcolormesh.py | 4 +- .../cube_printout/test_CubePrintout.py | 40 +- .../cube_printout/test_Table.py | 4 +- .../cube_summary/test_CubeSummary.py | 36 +- lib/iris/tests/unit/test_Future.py | 4 +- lib/iris/tests/unit/test_sample_data_path.py | 12 +- .../tests/unit/tests/stock/test_netcdf.py | 24 +- .../tests/unit/time/test_PartialDateTime.py | 20 +- .../unit/util/test__slice_data_with_keys.py | 20 +- .../unit/util/test_broadcast_to_shape.py | 4 +- .../unit/util/test_equalise_attributes.py | 15 +- .../unit/util/test_file_is_newer_than.py | 8 +- .../unit/util/test_find_discontiguities.py | 4 +- lib/iris/tests/unit/util/test_mask_cube.py | 8 +- lib/iris/tests/unit/util/test_new_axis.py | 16 +- .../test_promote_aux_coord_to_dim_coord.py | 4 +- lib/iris/tests/unit/util/test_reverse.py | 57 +-- .../tests/unit/util/test_rolling_window.py | 4 +- .../tests/unit/util/test_unify_time_units.py | 8 +- lib/iris/util.py | 100 ++--- noxfile.py | 8 +- pyproject.toml | 6 +- setup.py | 9 +- 384 files changed, 3145 insertions(+), 9207 deletions(-) diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index a60cb7f2b7..8851c21108 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -83,14 +83,10 @@ def __init__( super().__init__(conf, python, requirements, tagged_env_vars) self._update_info() - self._env_commands = self._interpolate_commands( - conf.delegated_env_commands - ) + self._env_commands = self._interpolate_commands(conf.delegated_env_commands) # Again using _interpolate_commands to get env parent path - allows use # of the same ASV env variables. 
- env_parent_interpolated = self._interpolate_commands( - conf.delegated_env_parent - ) + env_parent_interpolated = self._interpolate_commands(conf.delegated_env_parent) # Returns list of tuples, we just want the first. env_parent_first = env_parent_interpolated[0] # The 'command' is the first item in the returned tuple. @@ -152,9 +148,7 @@ def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: # Adapt the build_dir to the cache location. build_root_path = Path(self._build_root) build_dir_original = build_root_path / self._repo_subdir - build_dir_subpath = build_dir_original.relative_to( - build_root_path.parent - ) + build_dir_subpath = build_dir_original.relative_to(build_root_path.parent) build_dir = asv_cache_path / build_dir_subpath # Run the script(s) for delegated environment creation/updating. @@ -184,9 +178,7 @@ def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: env_path.unlink(missing_ok=True) except IsADirectoryError: rmtree(env_path) - env_path.symlink_to( - delegated_env_path, target_is_directory=True - ) + env_path.symlink_to(delegated_env_path, target_is_directory=True) # Check that environment exists. try: @@ -206,6 +198,4 @@ def checkout_project(self, repo: Repo, commit_hash: str) -> None: """Check out the working tree of the project at given commit hash.""" super().checkout_project(repo, commit_hash) self._prep_env() - log.info( - f"Environment {self.name} updated to spec at {commit_hash[:8]}" - ) + log.info(f"Environment {self.name} updated to spec at {commit_hash[:8]}") diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index 957b28e3fd..fe7ee8b4d2 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -10,10 +10,7 @@ from . import _N_CUBESPHERE_UM_EQUIVALENT, _UM_DIMS_YX from .. 
import TrackAddedMemoryAllocation, on_demand_benchmark -from ..generate_data.ugrid import ( - make_cube_like_2d_cubesphere, - make_cube_like_umfield, -) +from ..generate_data.ugrid import make_cube_like_2d_cubesphere, make_cube_like_umfield @on_demand_benchmark diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index ceacb4e86c..57aec690e0 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -117,10 +117,7 @@ def setup(self): # Variables needed by the overridden time_add benchmark in this subclass. cube_w_coord = self.cube.copy() - [ - cube_w_coord.remove_aux_factory(i) - for i in cube_w_coord.aux_factories - ] + [cube_w_coord.remove_aux_factory(i) for i in cube_w_coord.aux_factories] self.cube_w_coord = cube_w_coord def time_add(self): @@ -158,9 +155,7 @@ def setup(self): ancillary_variable = coords.AncillaryVariable(data_1d) # Variables needed by the ComponentCommon base class. - self.cube_kwargs = { - "ancillary_variables_and_dims": [(ancillary_variable, 0)] - } + self.cube_kwargs = {"ancillary_variables_and_dims": [(ancillary_variable, 0)]} self.add_method = cube.Cube.add_ancillary_variable self.add_args = (ancillary_variable, 0) @@ -176,9 +171,7 @@ class MeshCoord: param_names = ["number of faces"] def setup(self, n_faces): - mesh_kwargs = dict( - n_nodes=n_faces + 2, n_edges=n_faces * 2, n_faces=n_faces - ) + mesh_kwargs = dict(n_nodes=n_faces + 2, n_edges=n_faces * 2, n_faces=n_faces) self.mesh_coord = sample_meshcoord(sample_mesh_kwargs=mesh_kwargs) self.data = np.zeros(n_faces) @@ -186,9 +179,7 @@ def setup(self, n_faces): self.cube = self.create() def create(self): - return cube.Cube( - data=self.data, aux_coords_and_dims=[(self.mesh_coord, 0)] - ) + return cube.Cube(data=self.data, aux_coords_and_dims=[(self.mesh_coord, 0)]) def time_create(self, n_faces): _ = self.create() diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index 
1fa8b82d67..2ce7ba4623 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -53,9 +53,7 @@ def setup(self, n_faces): super().setup(n_faces) def create(self): - return ugrid.Connectivity( - indices=self.array, cf_role="face_node_connectivity" - ) + return ugrid.Connectivity(indices=self.array, cf_role="face_node_connectivity") def time_indices(self, n_faces): _ = self.object.indices diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 6d62cf9cd5..04b5933e70 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -57,8 +57,7 @@ def _make_region_cubes(self, full_mesh_cube): n_facesperregion = n_faces // n_regions i_face_regions = (i_faces // n_facesperregion) % n_regions region_inds = [ - np.where(i_face_regions == i_region)[0] - for i_region in range(n_regions) + np.where(i_face_regions == i_region)[0] for i_region in range(n_regions) ] # NOTE: this produces 7 regions, with near-adjacent value ranges but # with some points "moved" to an adjacent region. @@ -90,9 +89,7 @@ def setup_cache(self): self._parametrised_cache_filename(n_cubesphere, "regioncubes"), ) - def setup( - self, n_cubesphere, imaginary_data=True, create_result_cube=True - ): + def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): """ The combine-tests "standard" setup operation. @@ -132,9 +129,7 @@ def setup( # This has the same lazy-array attributes, but is allocated by # creating chunks on demand instead of loading from file. 
data = cube.lazy_data() - data = da.zeros( - data.shape, dtype=data.dtype, chunks=data.chunksize - ) + data = da.zeros(data.shape, dtype=data.dtype, chunks=data.chunksize) cube.data = data if create_result_cube: diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py index 8837e7cca9..db4c6c0ca0 100644 --- a/benchmarks/benchmarks/generate_data/__init__.py +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -36,9 +36,7 @@ error = "Env variable DATA_GEN_PYTHON not defined." raise KeyError(error) except (CalledProcessError, FileNotFoundError, PermissionError): - error = ( - "Env variable DATA_GEN_PYTHON not a runnable python executable path." - ) + error = "Env variable DATA_GEN_PYTHON not a runnable python executable path." raise ValueError(error) # The default location of data files used in benchmarks. Used by CI. @@ -90,9 +88,7 @@ def run_function_elsewhere(func_to_run, *args, **kwargs): func_string = dedent(getsource(func_to_run)) func_string = func_string.replace("@staticmethod\n", "") func_call_term_strings = [repr(arg) for arg in args] - func_call_term_strings += [ - f"{name}={repr(val)}" for name, val in kwargs.items() - ] + func_call_term_strings += [f"{name}={repr(val)}" for name, val in kwargs.items()] func_call_string = ( f"{func_to_run.__name__}(" + ",".join(func_call_term_strings) + ")" ) diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index b6702ad883..9b824efd17 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -33,9 +33,7 @@ def _external(func_name_, temp_file_dir, **kwargs_): print(func(temp_file_dir, **kwargs_), end="") args_hash = hash_args(**kwargs) - save_path = (BENCHMARK_DATA / f"{func_name}_{args_hash}").with_suffix( - ".nc" - ) + save_path = (BENCHMARK_DATA / f"{func_name}_{args_hash}").with_suffix(".nc") if not REUSE_DATA or not save_path.is_file(): # The xios 
functions take control of save location so need to move to # a more specific name that allows reuse. @@ -105,13 +103,9 @@ def _external(*args, **kwargs): arg_list = [n_nodes, n_faces, n_edges] args_hash = hash_args(*arg_list) - save_path = (BENCHMARK_DATA / f"sample_mesh_{args_hash}").with_suffix( - ".nc" - ) + save_path = (BENCHMARK_DATA / f"sample_mesh_{args_hash}").with_suffix(".nc") if not REUSE_DATA or not save_path.is_file(): - _ = run_function_elsewhere( - _external, *arg_list, save_path=str(save_path) - ) + _ = run_function_elsewhere(_external, *arg_list, save_path=str(save_path)) with PARSE_UGRID_ON_LOAD.context(): if not lazy_values: # Realise everything. @@ -149,9 +143,7 @@ def _external(sample_mesh_kwargs_, save_path_): save_mesh(new_meshcoord.mesh, save_path_) args_hash = hash_args(**sample_mesh_kwargs) - save_path = ( - BENCHMARK_DATA / f"sample_mesh_coord_{args_hash}" - ).with_suffix(".nc") + save_path = (BENCHMARK_DATA / f"sample_mesh_coord_{args_hash}").with_suffix(".nc") if not REUSE_DATA or not save_path.is_file(): _ = run_function_elsewhere( _external, diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py index 3be5c20a48..59114b1846 100644 --- a/benchmarks/benchmarks/generate_data/ugrid.py +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -15,9 +15,7 @@ ) -def generate_cube_like_2d_cubesphere( - n_cube: int, with_mesh: bool, output_path: str -): +def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: str): """ Construct and save to file an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent @@ -71,9 +69,7 @@ def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): files in our common testdata directory. 
""" - identifying_filename = ( - f"cube_like_2d_cubesphere_C{n_cube}_Mesh={with_mesh}.nc" - ) + identifying_filename = f"cube_like_2d_cubesphere_C{n_cube}_Mesh={with_mesh}.nc" filepath = BENCHMARK_DATA / identifying_filename if not filepath.exists(): # Create the required testfile, by running the generation code remotely @@ -151,9 +147,9 @@ def _external(xy_dims_, save_path_): save(cube, save_path_) - save_path = ( - BENCHMARK_DATA / f"make_cube_like_umfield_{xy_dims}" - ).with_suffix(".nc") + save_path = (BENCHMARK_DATA / f"make_cube_like_umfield_{xy_dims}").with_suffix( + ".nc" + ) if not REUSE_DATA or not save_path.is_file(): _ = run_function_elsewhere(_external, xy_dims, str(save_path)) with PARSE_UGRID_ON_LOAD.context(): diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py index 23d3770aa1..110260de42 100644 --- a/benchmarks/benchmarks/generate_data/um_files.py +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -89,15 +89,11 @@ def add_field(level_: int, time_step_: int) -> None: three_rec = six_rec / 2 new_field.blev = level_1**2 * six_rec - six_rec - new_field.brsvd1 = ( - level_1**2 * six_rec + (six_rec * level_1) - three_rec - ) + new_field.brsvd1 = level_1**2 * six_rec + (six_rec * level_1) - three_rec brsvd2_simulated = np.linspace(0.995, 0, len_z) shift = min(len_z, 2) - bhrlev_simulated = np.concatenate( - [np.ones(shift), brsvd2_simulated[:-shift]] - ) + bhrlev_simulated = np.concatenate([np.ones(shift), brsvd2_simulated[:-shift]]) new_field.brsvd2 = brsvd2_simulated[level_] new_field.bhrlev = bhrlev_simulated[level_] diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index a926e6b7e2..3f4b9b222b 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -77,14 +77,10 @@ def setup_cache(self) -> dict: file_path_dict = {} for xyz in self.params[0]: x, y, z = xyz - file_path_dict[xyz] = create_um_files( - x, 
y, z, 1, False, file_type_args - ) + file_path_dict[xyz] = create_um_files(x, y, z, 1, False, file_type_args) return file_path_dict - def setup( - self, file_path_dict: dict, xyz: tuple, file_format: str - ) -> None: + def setup(self, file_path_dict: dict, xyz: tuple, file_format: str) -> None: self.file_path = file_path_dict[xyz][file_format] def time_stash_constraint(self, _, __, ___) -> None: @@ -104,9 +100,7 @@ def setup_cache(self) -> dict: ) return file_path_dict - def setup( - self, file_path_dict: dict, time_dim_len: int, file_format: str - ) -> None: + def setup(self, file_path_dict: dict, time_dim_len: int, file_format: str) -> None: self.file_path = file_path_dict[time_dim_len][file_format] self.time_constr = Constraint(time=lambda cell: cell.point.year < 3) diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py index ef01ae03be..cfbe55f2ad 100644 --- a/benchmarks/benchmarks/load/ugrid.py +++ b/benchmarks/benchmarks/load/ugrid.py @@ -97,9 +97,7 @@ class DataRealisationTime(DataRealisation): param_names = ["number of time steps"] def setup(self, *args): - self.setup_common( - dataset_name="Realisation", n_faces=1, n_times=args[0] - ) + self.setup_common(dataset_name="Realisation", n_faces=1, n_times=args[0]) class Callback: diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 9cd77527af..a14d7c2668 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -22,9 +22,7 @@ class HorizontalChunkedRegridding: def setup(self) -> None: # Prepare a cube and a template - cube_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_xyt.nc"] - ) + cube_file_path = tests.get_data_path(["NetCDF", "regrid", "regrid_xyt.nc"]) self.cube = iris.load_cube(cube_file_path) # Prepare a tougher cube and chunk it @@ -61,9 +59,7 @@ class CurvilinearRegridding: def setup(self) -> None: # Prepare a cube and a template - cube_file_path = tests.get_data_path( - 
["NetCDF", "regrid", "regrid_xyt.nc"] - ) + cube_file_path = tests.get_data_path(["NetCDF", "regrid", "regrid_xyt.nc"]) self.cube = iris.load_cube(cube_file_path) # Make the source cube curvilinear diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index da0cffde50..1c37275079 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -53,8 +53,7 @@ def _make_region_cubes(self, full_mesh_cube): n_facesperregion = n_faces // n_regions i_face_regions = (i_faces // n_facesperregion) % n_regions region_inds = [ - np.where(i_face_regions == i_region)[0] - for i_region in range(n_regions) + np.where(i_face_regions == i_region)[0] for i_region in range(n_regions) ] # NOTE: this produces 7 regions, with near-adjacent value ranges but # with some points "moved" to an adjacent region. @@ -86,9 +85,7 @@ def setup_cache(self): self._parametrised_cache_filename(n_cubesphere, "regioncubes"), ) - def setup( - self, n_cubesphere, imaginary_data=True, create_result_cube=True - ): + def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): """ The combine-tests "standard" setup operation. @@ -128,9 +125,7 @@ def setup( # This has the same lazy-array attributes, but is allocated by # creating chunks on demand instead of loading from file. data = cube.lazy_data() - data = da.zeros( - data.shape, dtype=data.dtype, chunks=data.chunksize - ) + data = da.zeros(data.shape, dtype=data.dtype, chunks=data.chunksize) cube.data = data if create_result_cube: @@ -181,9 +176,7 @@ class CreateCube(Mixin): """ - def setup( - self, n_cubesphere, imaginary_data=True, create_result_cube=False - ): + def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=False): # In this case only, do *not* create the result cube. # That is the operation we want to test. 
super().setup(n_cubesphere, imaginary_data, create_result_cube) @@ -240,9 +233,7 @@ class FileStreamedCalc(Mixin): cubes on disk. """ - def setup( - self, n_cubesphere, imaginary_data=False, create_result_cube=True - ): + def setup(self, n_cubesphere, imaginary_data=False, create_result_cube=True): # In this case only, do *not* replace the loaded regions data with # 'imaginary' data, as we want to test file-to-file calculation+save. super().setup(n_cubesphere, imaginary_data, create_result_cube) diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py index e4c3297614..0c99bf77c1 100644 --- a/benchmarks/benchmarks/trajectory.py +++ b/benchmarks/benchmarks/trajectory.py @@ -21,14 +21,10 @@ class TrajectoryInterpolation: def setup(self) -> None: # Prepare a cube and a template - cube_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_xyt.nc"] - ) + cube_file_path = tests.get_data_path(["NetCDF", "regrid", "regrid_xyt.nc"]) self.cube = iris.load_cube(cube_file_path) - trajectory = np.array( - [np.array((-50 + i, -50 + i)) for i in range(100)] - ) + trajectory = np.array([np.array((-50 + i, -50 + i)) for i in range(100)]) self.sample_points = [ ("longitude", trajectory[:, 0]), ("latitude", trajectory[:, 1]), diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 1efe8d3acb..3f1d74b552 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -30,9 +30,7 @@ GH_REPORT_DIR = ROOT_DIR.joinpath(".github", "workflows", "benchmark_reports") # Common ASV arguments for all run_types except `custom`. 
-ASV_HARNESS = ( - "run {posargs} --attribute rounds=4 --interleave-rounds --show-stderr" -) +ASV_HARNESS = "run {posargs} --attribute rounds=4 --interleave-rounds --show-stderr" def echo(echo_string: str): @@ -148,18 +146,14 @@ def _asv_compare(*commits: str, overnight_mode: bool = False) -> None: comparison = _subprocess_runner_capture(asv_command, asv=True) echo(comparison) - shifts = _subprocess_runner_capture( - [*asv_command, "--only-changed"], asv=True - ) + shifts = _subprocess_runner_capture([*asv_command, "--only-changed"], asv=True) if shifts or (not overnight_mode): # For the overnight run: only post if there are shifts. _gh_create_reports(after, comparison, shifts) -def _gh_create_reports( - commit_sha: str, results_full: str, results_shifts: str -) -> None: +def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str) -> None: """ If running under GitHub Actions: record the results in report(s). @@ -174,9 +168,7 @@ def _gh_create_reports( on_pull_request = pr_number is not None run_id = environ["GITHUB_RUN_ID"] repo = environ["GITHUB_REPOSITORY"] - gha_run_link = ( - f"[`{run_id}`](https://github.com/{repo}/actions/runs/{run_id})" - ) + gha_run_link = f"[`{run_id}`](https://github.com/{repo}/actions/runs/{run_id})" GH_REPORT_DIR.mkdir(exist_ok=True) commit_dir = GH_REPORT_DIR / commit_sha @@ -387,9 +379,7 @@ def func(args: argparse.Namespace) -> None: commit_range = f"{args.first_commit}^^.." # git rev-list --first-parent is the command ASV uses. 
- git_command = shlex.split( - f"git rev-list --first-parent {commit_range}" - ) + git_command = shlex.split(f"git rev-list --first-parent {commit_range}") commit_string = _subprocess_runner_capture(git_command) commit_list = commit_string.split("\n") @@ -436,9 +426,7 @@ def func(args: argparse.Namespace) -> None: git_command = shlex.split("git rev-parse HEAD") head_sha = _subprocess_runner_capture(git_command)[:8] - git_command = shlex.split( - f"git merge-base {head_sha} {args.base_branch}" - ) + git_command = shlex.split(f"git merge-base {head_sha} {args.base_branch}") merge_base = _subprocess_runner_capture(git_command)[:8] with NamedTemporaryFile("w") as hashfile: @@ -474,20 +462,15 @@ def add_arguments(self) -> None: ) @staticmethod - def csperf( - args: argparse.Namespace, run_type: Literal["cperf", "sperf"] - ) -> None: + def csperf(args: argparse.Namespace, run_type: Literal["cperf", "sperf"]) -> None: _setup_common() publish_dir = Path(args.publish_dir) if not publish_dir.is_dir(): - message = ( - f"Input 'publish directory' is not a directory: {publish_dir}" - ) + message = f"Input 'publish directory' is not a directory: {publish_dir}" raise NotADirectoryError(message) publish_subdir = ( - publish_dir - / f"{run_type}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" + publish_dir / f"{run_type}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" ) publish_subdir.mkdir() @@ -496,14 +479,10 @@ def csperf( environ["ON_DEMAND_BENCHMARKS"] = "True" commit_range = "upstream/main^!" - asv_command = ( - ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" - ) + asv_command = ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" # Only do a single round. 
- asv_command = shlex.split( - re.sub(r"rounds=\d", "rounds=1", asv_command) - ) + asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command)) try: _subprocess_runner([*asv_command, *args.asv_args], asv=True) except subprocess.CalledProcessError as err: @@ -513,9 +492,7 @@ def csperf( if err.returncode != 2: raise - asv_command = shlex.split( - f"publish {commit_range} --html-dir={publish_subdir}" - ) + asv_command = shlex.split(f"publish {commit_range} --html-dir={publish_subdir}") _subprocess_runner(asv_command, asv=True) # Print completion message. diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index d7948ac965..507519808f 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -69,12 +69,8 @@ def main(): # Apply each filter using the rolling_window method used with the weights # keyword argument. A weighted sum is required because the magnitude of # the weights are just as important as their relative sizes. - soi24 = soi.rolling_window( - "time", iris.analysis.SUM, len(wgts24), weights=wgts24 - ) - soi84 = soi.rolling_window( - "time", iris.analysis.SUM, len(wgts84), weights=wgts84 - ) + soi24 = soi.rolling_window("time", iris.analysis.SUM, len(wgts24), weights=wgts24) + soi84 = soi.rolling_window("time", iris.analysis.SUM, len(wgts84), weights=wgts84) # Plot the SOI time series and both filtered versions. plt.figure(figsize=(9, 4)) diff --git a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index 12f4bdb0dc..42529e0885 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -22,13 +22,9 @@ def main(): # Extract a single height vs longitude cross-section. N.B. This could # easily be changed to extract a specific slice, or even to loop over *all* # cross section slices. 
- cross_section = next( - theta.slices(["grid_longitude", "model_level_number"]) - ) + cross_section = next(theta.slices(["grid_longitude", "model_level_number"])) - qplt.contourf( - cross_section, coords=["grid_longitude", "altitude"], cmap="RdBu_r" - ) + qplt.contourf(cross_section, coords=["grid_longitude", "altitude"], cmap="RdBu_r") iplt.show() # Now do the equivalent plot, only against model level diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 4b817aea66..8040ea81f5 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -125,9 +125,7 @@ def load_NAME_III(filename): header_value = int(header_value) elif header_name in DATE_HEADERS: # convert the time to python datetimes - header_value = datetime.datetime.strptime( - header_value, UTC_format - ) + header_value = datetime.datetime.strptime(header_value, UTC_format) headers[header_name] = header_value @@ -194,9 +192,7 @@ def NAME_to_cube(filenames, callback): # information for each field into a dictionary of headers for just # this field. Ignore the first 4 columns of grid position (data was # located with the data array). 
- field_headings = dict( - (k, v[i + 4]) for k, v in column_headings.items() - ) + field_headings = dict((k, v[i + 4]) for k, v in column_headings.items()) # make an cube cube = iris.cube.Cube(data_array) @@ -333,9 +329,7 @@ def main(): r"$%s < x \leq %s$" % (levels[1], levels[2]), r"$x > %s$" % levels[2], ] - ax.legend( - artists, labels, title="Ash concentration / g m-3", loc="upper left" - ) + ax.legend(artists, labels, title="Ash concentration / g m-3", loc="upper left") time = cube.coord("time") time_date = time.units.num2date(time.points[0]).strftime(UTC_format) diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 5da5d50571..0c113a2e6c 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -34,9 +34,7 @@ def main(): # Add the polynomial fit values to the time series to take # full advantage of Iris plotting functionality. long_name = "degree_{}_polynomial_fit_of_{}".format(degree, cube.name()) - fit = iris.coords.AuxCoord( - y_fitted, long_name=long_name, units=location.units - ) + fit = iris.coords.AuxCoord(y_fitted, long_name=long_name, units=location.units) location.add_aux_coord(fit, 0) qplt.plot(location.coord("time"), location, label="data") diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index c4254ad544..a5a5267f1c 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -56,9 +56,7 @@ def make_plot(projection_name, projection_crs): overlay_data = iris.load_cube(overlay_filepath, "total electron content") # NOTE: as above, "iris.plot.contour" calls "pyplot.contour" with a # 'transform' keyword, enabling Cartopy reprojection. 
- iplt.contour( - overlay_data, 20, linewidths=2.0, colors="darkgreen", linestyles="-" - ) + iplt.contour(overlay_data, 20, linewidths=2.0, colors="darkgreen", linestyles="-") # Draw a high resolution margin line, inset from the pcolormesh border. # First calculate rectangle corners, 7% in from each corner of the data. @@ -72,12 +70,8 @@ def make_plot(projection_name, projection_crs): steps = np.linspace(0, 1) zeros, ones = np.zeros(steps.size), np.ones(steps.size) x_delta, y_delta = (x_upper - x_lower), (y_upper - y_lower) - x_points = x_lower + x_delta * np.concatenate( - (steps, ones, steps[::-1], zeros) - ) - y_points = y_lower + y_delta * np.concatenate( - (zeros, steps, ones, steps[::-1]) - ) + x_points = x_lower + x_delta * np.concatenate((steps, ones, steps[::-1], zeros)) + y_points = y_lower + y_delta * np.concatenate((zeros, steps, ones, steps[::-1])) # Get the Iris coordinate system of the X coordinate (Y should be the same). cs_data1 = x_coord.coord_system # Construct an equivalent Cartopy coordinate reference system ("crs"). @@ -115,9 +109,7 @@ def make_plot(projection_name, projection_crs): ) # NOTE: the "plt.annotate call" does not have a "transform=" keyword, # so for this one we transform the coordinates with a Cartopy call. 
- at_x, at_y = ax.projection.transform_point( - lon, lat, src_crs=crs_latlon - ) + at_x, at_y = ax.projection.transform_point(lon, lat, src_crs=crs_latlon) plt.annotate( name, xy=(at_x, at_y), diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 2181b89b8c..516b54dbff 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -90,9 +90,7 @@ def main(): ) # Constrain the period 1860-1999 and extract the observed data from a1b - constraint = iris.Constraint( - time=lambda cell: 1860 <= cell.point.year <= 1999 - ) + constraint = iris.Constraint(time=lambda cell: 1860 <= cell.point.year <= 1999) observed = a1b_mean.extract(constraint) # Assert that this data set is the same as the e1 scenario: diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 529018ec8c..076b70a3ad 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -152,12 +152,8 @@ def main(): fig, ax_array = plt.subplots(1, 2, figsize=(12, 5)) # Loop over our scenarios to make a plot for each. - for ax, experiment, label in zip( - ax_array, ["E1", "A1B"], ["E1", "A1B-Image"] - ): - exp_cube = scenarios.extract_cube( - iris.Constraint(Experiment=experiment) - ) + for ax, experiment, label in zip(ax_array, ["E1", "A1B"], ["E1", "A1B-Image"]): + exp_cube = scenarios.extract_cube(iris.Constraint(Experiment=experiment)) time_coord = exp_cube.coord("time") # Calculate the difference from the preindustial control run. 
diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index b600941f35..e457afc383 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -42,9 +42,7 @@ def main(): iris.Constraint(phenom, model_level_number=1) for phenom in phenomena ] - air_potential_temperature, air_pressure = iris.load_cubes( - fname, constraints - ) + air_potential_temperature, air_pressure = iris.load_cubes(fname, constraints) # Define a coordinate which represents 1000 hPa p0 = coords.AuxCoord(1000, long_name="P0", units="hPa") diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index 0639c7ac1d..a84a348699 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -47,9 +47,7 @@ def realization_metadata(cube, field, fname): def main(): # Create a constraint to extract surface temperature cubes which have a # "realization" coordinate. - constraint = iris.Constraint( - "surface_temperature", realization=lambda value: True - ) + constraint = iris.Constraint("surface_temperature", realization=lambda value: True) # Use this to load our ensemble. The callback ensures all our members # have the "realization" coordinate and therefore they will all be loaded. surface_temp = iris.load_cube( @@ -128,9 +126,7 @@ def main(): # Nino 3.4 lies between: 170W and 120W, 5N and 5S, so use the intersection # method to restrict to this region. - nino_cube = surface_temp.intersection( - latitude=[-5, 5], longitude=[-170, -120] - ) + nino_cube = surface_temp.intersection(latitude=[-5, 5], longitude=[-170, -120]) # Calculate the horizontal mean for the nino region. 
mean = nino_cube.collapsed(["latitude", "longitude"], iris.analysis.MEAN) diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index b09040c64e..7945a7f896 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -32,9 +32,7 @@ def main(): # that a storm is passing over magnitude = (uwind**2 + vwind**2) ** 0.5 magnitude.convert_units("knot") - max_speed = magnitude.collapsed( - ("latitude", "longitude"), iris.analysis.MAX - ).data + max_speed = magnitude.collapsed(("latitude", "longitude"), iris.analysis.MAX).data max_desired = 65 uwind = uwind / max_speed * max_desired diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index 40d9d0da00..e00f9af654 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -34,9 +34,7 @@ def main(): qplt.contourf(windspeed, 20) # Show the lake on the current axes. - lakes = cfeat.NaturalEarthFeature( - "physical", "lakes", "50m", facecolor="none" - ) + lakes = cfeat.NaturalEarthFeature("physical", "lakes", "50m", facecolor="none") plt.gca().add_feature(lakes) # Add arrows to show the wind vectors. 
diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index b19f37e1f5..a6c0ce0de6 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -51,11 +51,7 @@ def main(): lon_string = "{:.3f}\u00B0 {}".format( abs(lon_point), "E" if lon_point > 0.0 else "W" ) - plt.title( - "{} at {} {}".format( - cube.long_name.capitalize(), lat_string, lon_string - ) - ) + plt.title("{} at {} {}".format(cube.long_name.capitalize(), lat_string, lon_string)) iplt.show() diff --git a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index 627be8591b..e4bc073a46 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -37,9 +37,7 @@ def main(): pcarree = projections["PlateCarree"] # Transform cube to target projection - new_cube, extent = iris.analysis.cartography.project( - cube, pcarree, nx=400, ny=200 - ) + new_cube, extent = iris.analysis.cartography.project(cube, pcarree, nx=400, ny=200) # Plot data in each projection for name in sorted(projections): diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index a69247008a..e704b93de2 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:449: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:822: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:800: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:449: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) :: @@ -125,10 +125,10 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=455) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=449) ... my_operation() ... - iris/coord_systems.py:822: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:800: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: @@ -188,7 +188,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
+ iris/coord_systems.py:449: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) ---- diff --git a/docs/src/userguide/plotting_examples/brewer.py b/docs/src/userguide/plotting_examples/brewer.py index f2ede9f9bc..905296279d 100644 --- a/docs/src/userguide/plotting_examples/brewer.py +++ b/docs/src/userguide/plotting_examples/brewer.py @@ -18,9 +18,7 @@ def main(): plt.axis("off") plt.imshow(a, aspect="auto", cmap=plt.get_cmap(m), origin="lower") pos = list(ax.get_position().bounds) - fig.text( - pos[0] - 0.01, pos[1], m, fontsize=8, horizontalalignment="right" - ) + fig.text(pos[0] - 0.01, pos[1], m, fontsize=8, horizontalalignment="right") plt.show() diff --git a/docs/src/userguide/regridding_plots/regridded_to_global.py b/docs/src/userguide/regridding_plots/regridded_to_global.py index b32eb90816..5ce6513ef0 100644 --- a/docs/src/userguide/regridding_plots/regridded_to_global.py +++ b/docs/src/userguide/regridding_plots/regridded_to_global.py @@ -12,7 +12,7 @@ plt.figure(figsize=(4, 3)) iplt.pcolormesh(global_psl) -plt.title("Air pressure\n" "on a global longitude latitude grid") +plt.title("Air pressure\non a global longitude latitude grid") ax = plt.gca() ax.coastlines() ax.gridlines() diff --git a/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py b/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py index a9e6493ae5..f53e624e03 100644 --- a/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py +++ b/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py @@ -36,9 +36,7 @@ size="medium", ) -plt.subplots_adjust( - hspace=0, wspace=0.05, left=0.001, right=0.999, bottom=0, top=0.955 -) +plt.subplots_adjust(hspace=0, wspace=0.05, left=0.001, right=0.999, bottom=0, top=0.955) # Iterate over each of the 
figure's axes, adding coastlines, gridlines # and setting the extent. diff --git a/docs/src/userguide/regridding_plots/regridded_to_rotated.py b/docs/src/userguide/regridding_plots/regridded_to_rotated.py index ba01369f22..cb54a016cb 100644 --- a/docs/src/userguide/regridding_plots/regridded_to_rotated.py +++ b/docs/src/userguide/regridding_plots/regridded_to_rotated.py @@ -13,7 +13,7 @@ plt.figure(figsize=(4, 3)) iplt.pcolormesh(rotated_air_temp, norm=plt.Normalize(260, 300)) -plt.title("Air temperature\n" "on a limited area rotated pole grid") +plt.title("Air temperature\non a limited area rotated pole grid") ax = plt.gca() ax.coastlines(resolution="50m") ax.gridlines() diff --git a/docs/src/userguide/regridding_plots/regridding_plot.py b/docs/src/userguide/regridding_plots/regridding_plot.py index a02d671c0a..c559e0e3e7 100644 --- a/docs/src/userguide/regridding_plots/regridding_plot.py +++ b/docs/src/userguide/regridding_plots/regridding_plot.py @@ -11,14 +11,14 @@ plt.subplot(1, 2, 1) iplt.pcolormesh(global_air_temp, norm=plt.Normalize(260, 300)) -plt.title("Air temperature\n" "on a global longitude latitude grid") +plt.title("Air temperature\non a global longitude latitude grid") ax = plt.gca() ax.coastlines() ax.gridlines() plt.subplot(1, 2, 2) iplt.pcolormesh(rotated_psl) -plt.title("Air pressure\n" "on a limited area rotated pole grid") +plt.title("Air pressure\non a limited area rotated pole grid") ax = plt.gca() ax.coastlines(resolution="50m") ax.gridlines() diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 57fe376947..423831f9b0 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -172,6 +172,9 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ adopted and configured the `ruff`_ linter. (:pull:`5623`) +#. `@bjlittle`_ configured the ``line-length = 88`` for `black`_, `isort`_ + and `ruff`_. (:pull:`5632`) + .. 
comment Whatsnew author names (@github name) in alphabetical order. Note that, diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index a10169b7bb..e4016ff4d5 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -141,9 +141,7 @@ def callback(cube, field, filename): class Future(threading.local): """Run-time configuration controller.""" - def __init__( - self, datum_support=False, pandas_ndim=False, save_split_attrs=False - ): + def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=False): """ A container for run-time options controls. @@ -190,9 +188,7 @@ def __repr__(self): # msg = ('Future(example_future_flag={})') # return msg.format(self.example_future_flag) msg = "Future(datum_support={}, pandas_ndim={}, save_split_attrs={})" - return msg.format( - self.datum_support, self.pandas_ndim, self.save_split_attrs - ) + return msg.format(self.datum_support, self.pandas_ndim, self.save_split_attrs) # deprecated_options = {'example_future_flag': 'warning',} deprecated_options = {} @@ -478,8 +474,7 @@ def sample_data_path(*path_to_join): target = os.path.join(iris_sample_data.path, target) else: raise ImportError( - "Please install the 'iris-sample-data' package to " - "access sample data." + "Please install the 'iris-sample-data' package to access sample data." ) if not glob.glob(target): raise ValueError( diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 554f14d914..4ce5c8ee45 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -104,9 +104,7 @@ def __new__(mcs, coord, dims): defn = coord.metadata points_dtype = coord.core_points().dtype bounds_dtype = ( - coord.core_bounds().dtype - if coord.core_bounds() is not None - else None + coord.core_bounds().dtype if coord.core_bounds() is not None else None ) kwargs = {} # Add scalar flag metadata. 
@@ -123,9 +121,7 @@ def __new__(mcs, coord, dims): else: order = _DECREASING kwargs["order"] = order - metadata = super().__new__( - mcs, defn, dims, points_dtype, bounds_dtype, kwargs - ) + metadata = super().__new__(mcs, defn, dims, points_dtype, bounds_dtype, kwargs) return metadata __slots__ = () @@ -602,15 +598,11 @@ def match(self, other, error_on_mismatch): # Check dim coordinates. if self.dim_metadata != other.dim_metadata: differences = self._coordinate_differences(other, "dim_metadata") - msgs.append( - msg_template.format("Dimension coordinates", *differences) - ) + msgs.append(msg_template.format("Dimension coordinates", *differences)) # Check aux coordinates. if self.aux_metadata != other.aux_metadata: differences = self._coordinate_differences(other, "aux_metadata") - msgs.append( - msg_template.format("Auxiliary coordinates", *differences) - ) + msgs.append(msg_template.format("Auxiliary coordinates", *differences)) # Check cell measures. if self.cm_metadata != other.cm_metadata: differences = self._coordinate_differences(other, "cm_metadata") @@ -618,38 +610,26 @@ def match(self, other, error_on_mismatch): # Check ancillary variables. if self.av_metadata != other.av_metadata: differences = self._coordinate_differences(other, "av_metadata") - msgs.append( - msg_template.format("Ancillary variables", *differences) - ) + msgs.append(msg_template.format("Ancillary variables", *differences)) # Check derived coordinates. if self.derived_metadata != other.derived_metadata: - differences = self._coordinate_differences( - other, "derived_metadata" - ) - msgs.append( - msg_template.format("Derived coordinates", *differences) - ) + differences = self._coordinate_differences(other, "derived_metadata") + msgs.append(msg_template.format("Derived coordinates", *differences)) # Check scalar coordinates. 
if self.scalar_coords != other.scalar_coords: differences = self._coordinate_differences( other, "scalar_coords", reason="values or metadata" ) - msgs.append( - msg_template.format("Scalar coordinates", *differences) - ) + msgs.append(msg_template.format("Scalar coordinates", *differences)) # Check ndim. if self.ndim != other.ndim: msgs.append( - msg_template.format( - "Data dimensions", "", self.ndim, other.ndim - ) + msg_template.format("Data dimensions", "", self.ndim, other.ndim) ) # Check data type. if self.data_type != other.data_type: msgs.append( - msg_template.format( - "Data types", "", self.data_type, other.data_type - ) + msg_template.format("Data types", "", self.data_type, other.data_type) ) match = not bool(msgs) @@ -679,16 +659,13 @@ def __init__(self, cube_signature): """ self.aux_coords_and_dims = cube_signature.aux_coords_and_dims self.cell_measures_and_dims = cube_signature.cell_measures_and_dims - self.ancillary_variables_and_dims = ( - cube_signature.ancillary_variables_and_dims - ) + self.ancillary_variables_and_dims = cube_signature.ancillary_variables_and_dims self.derived_coords_and_dims = cube_signature.derived_coords_and_dims self.dim_coords = cube_signature.dim_coords self.dim_mapping = cube_signature.dim_mapping self.dim_extents = [] self.dim_order = [ - metadata.kwargs["order"] - for metadata in cube_signature.dim_metadata + metadata.kwargs["order"] for metadata in cube_signature.dim_metadata ] # Calculate the extents for each dimensional coordinate. @@ -705,27 +682,18 @@ def _cmp(coord, other): """ # A candidate axis must have non-identical coordinate points. - candidate_axis = not array_equal( - coord.core_points(), other.core_points() - ) + candidate_axis = not array_equal(coord.core_points(), other.core_points()) if candidate_axis: # Ensure both have equal availability of bounds. 
- result = (coord.core_bounds() is None) == ( - other.core_bounds() is None - ) + result = (coord.core_bounds() is None) == (other.core_bounds() is None) else: - if ( - coord.core_bounds() is not None - and other.core_bounds() is not None - ): + if coord.core_bounds() is not None and other.core_bounds() is not None: # Ensure equality of bounds. result = array_equal(coord.core_bounds(), other.core_bounds()) else: # Ensure both have equal availability of bounds. - result = ( - coord.core_bounds() is None and other.core_bounds() is None - ) + result = coord.core_bounds() is None and other.core_bounds() is None return result, candidate_axis @@ -967,9 +935,8 @@ def register( """ # Verify and assert the nominated axis. if axis is not None and self.axis is not None and self.axis != axis: - msg = ( - "Nominated axis [{}] is not equal " - "to negotiated axis [{}]".format(axis, self.axis) + msg = "Nominated axis [{}] is not equal to negotiated axis [{}]".format( + axis, self.axis ) raise ValueError(msg) @@ -980,9 +947,7 @@ def register( # Check for compatible coordinate signatures. if match: coord_signature = _CoordSignature(cube_signature) - candidate_axis = self._coord_signature.candidate_axis( - coord_signature - ) + candidate_axis = self._coord_signature.candidate_axis(coord_signature) match = candidate_axis is not None and ( candidate_axis == axis or axis is None ) @@ -990,9 +955,7 @@ def register( # Check for compatible coordinate extents. 
if match: dim_ind = self._coord_signature.dim_mapping.index(candidate_axis) - match = self._sequence( - coord_signature.dim_extents[dim_ind], candidate_axis - ) + match = self._sequence(coord_signature.dim_extents[dim_ind], candidate_axis) if error_on_mismatch and not match: msg = f"Found cubes with overlap on concatenate axis {candidate_axis}, cannot concatenate overlapping cubes" raise iris.exceptions.ConcatenateError([msg]) @@ -1131,9 +1094,7 @@ def _build_aux_coordinates(self): bnds = None if coord.has_bounds(): bnds = [ - skton.signature.aux_coords_and_dims[ - i - ].coord.core_bounds() + skton.signature.aux_coords_and_dims[i].coord.core_bounds() for skton in skeletons ] bnds = np.concatenate(tuple(bnds), axis=dim) @@ -1148,16 +1109,12 @@ def _build_aux_coordinates(self): # Attempt to create a DimCoord, otherwise default to # an AuxCoord on failure. try: - coord = iris.coords.DimCoord( - points, bounds=bnds, **kwargs - ) + coord = iris.coords.DimCoord(points, bounds=bnds, **kwargs) except ValueError: # Ensure to remove the "circular" kwarg, which may be # present in the defn of a DimCoord being demoted. _ = kwargs.pop("circular", None) - coord = iris.coords.AuxCoord( - points, bounds=bnds, **kwargs - ) + coord = iris.coords.AuxCoord(points, bounds=bnds, **kwargs) aux_coords_and_dims.append((coord.copy(), dims)) @@ -1231,9 +1188,7 @@ def _build_ancillary_variables(self): ancillary_variables_and_dims = [] # Generate all the ancillary variables for the new concatenated cube. - for i, (av, dims) in enumerate( - cube_signature.ancillary_variables_and_dims - ): + for i, (av, dims) in enumerate(cube_signature.ancillary_variables_and_dims): # Check whether the ancillary variable spans the nominated # dimension of concatenation. if self.axis in dims: @@ -1415,8 +1370,7 @@ def _sequence(self, extent, axis): # Add the new extent to the current extents collection. 
dim_ind = self._coord_signature.dim_mapping.index(axis) dim_extents = [ - skeleton.signature.dim_extents[dim_ind] - for skeleton in self._skeletons + skeleton.signature.dim_extents[dim_ind] for skeleton in self._skeletons ] dim_extents.append(extent) diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 82225ec516..cbac8cbca4 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -101,9 +101,7 @@ def latitude_bands(cell): if not (name is None or isinstance(name, str)): raise TypeError("name must be None or string, got %r" % name) if not (cube_func is None or callable(cube_func)): - raise TypeError( - "cube_func must be None or callable, got %r" % cube_func - ) + raise TypeError("cube_func must be None or callable, got %r" % cube_func) if not (coord_values is None or isinstance(coord_values, Mapping)): raise TypeError( "coord_values must be None or a " @@ -126,9 +124,7 @@ def latitude_bands(cell): self._coord_constraints = [] for coord_name, coord_thing in self._coord_values.items(): - self._coord_constraints.append( - _CoordConstraint(coord_name, coord_thing) - ) + self._coord_constraints.append(_CoordConstraint(coord_name, coord_thing)) def __eq__(self, other): # Equivalence is defined, but is naturally limited for any Constraints @@ -267,9 +263,7 @@ def __repr__(self): ) def _CIM_extract(self, cube): - return self.operator( - self.lhs._CIM_extract(cube), self.rhs._CIM_extract(cube) - ) + return self.operator(self.lhs._CIM_extract(cube), self.rhs._CIM_extract(cube)) class _CoordConstraint: @@ -472,9 +466,7 @@ def as_slice(self): delta = np.diff(where_true, axis=0) # if the diff is consistent we can create a slice object if all(delta[0] == delta): - result[dim] = slice( - where_true[0], where_true[-1] + 1, delta[0] - ) + result[dim] = slice(where_true[0], where_true[-1] + 1, delta[0]) else: # otherwise, key is a tuple result[dim] = tuple(where_true) @@ -638,8 +630,7 @@ def __init__( def __eq__(self, other): eq = isinstance(other, 
NameConstraint) and all( - getattr(self, attname) == getattr(other, attname) - for attname in self._names + getattr(self, attname) == getattr(other, attname) for attname in self._names ) return eq diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py index 9ea4481307..ea62ff5fb9 100644 --- a/lib/iris/_data_manager.py +++ b/lib/iris/_data_manager.py @@ -145,9 +145,7 @@ def _assert_axioms(self): is_real = self._real_array is not None emsg = "Unexpected data state, got {}lazy and {}real data." state = is_lazy ^ is_real - assert state, emsg.format( - "" if is_lazy else "no ", "" if is_real else "no " - ) + assert state, emsg.format("" if is_lazy else "no ", "" if is_real else "no ") def _deepcopy(self, memo, data=None): """ @@ -245,9 +243,7 @@ def data(self, data): # Determine whether the class instance has been created, # as this method is called from within the __init__. - init_done = ( - self._lazy_array is not None or self._real_array is not None - ) + init_done = self._lazy_array is not None or self._real_array is not None if init_done and self.shape != data.shape: # The _ONLY_ data reshape permitted is converting a 0-dimensional diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 11477a2fa6..8c2f33b175 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -380,9 +380,7 @@ def multidim_lazy_stack(stack): result = da.stack(list(stack)) else: # Recurse because dask.stack does not do multi-dimensional. - result = da.stack( - [multidim_lazy_stack(subarray) for subarray in stack] - ) + result = da.stack([multidim_lazy_stack(subarray) for subarray in stack]) return result diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index a8f079e70e..d38c9e4982 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -34,9 +34,7 @@ # # Private namedtuple wrapper classes. 
# -class _Template( - namedtuple("Template", ["dims", "points", "bounds", "kwargs"]) -): +class _Template(namedtuple("Template", ["dims", "points", "bounds", "kwargs"])): """ Common framework from which to build a dimension or auxiliary coordinate. @@ -136,9 +134,7 @@ class _ScalarCoordPayload( class _VectorCoordPayload( - namedtuple( - "VectorCoordPayload", ["dim_coords_and_dims", "aux_coords_and_dims"] - ) + namedtuple("VectorCoordPayload", ["dim_coords_and_dims", "aux_coords_and_dims"]) ): """ Container for all vector coordinate data and metadata represented @@ -163,9 +159,7 @@ class _VectorCoordPayload( __slots__ = () -class _CoordPayload( - namedtuple("CoordPayload", ["scalar", "vector", "factory_defns"]) -): +class _CoordPayload(namedtuple("CoordPayload", ["scalar", "vector", "factory_defns"])): """ Container for all the scalar and vector coordinate data and metadata, and auxiliary coordinate factories represented within a @@ -415,9 +409,7 @@ def _defn_msgs(self, other_defn): ] diff_attrs = ", ".join(sorted(diff_attrs)) msgs.append( - "cube.attributes values differ for keys: {}".format( - diff_attrs - ) + "cube.attributes values differ for keys: {}".format(diff_attrs) ) if self_defn.cell_methods != other_defn.cell_methods: msgs.append("cube.cell_methods differ") @@ -460,10 +452,7 @@ def match(self, other, error_on_mismatch): # mismatch to be caused by a difference in order. 
if self.cell_measures_and_dims != other.cell_measures_and_dims: msgs.append("cube.cell_measures differ") - if ( - self.ancillary_variables_and_dims - != other.ancillary_variables_and_dims - ): + if self.ancillary_variables_and_dims != other.ancillary_variables_and_dims: msgs.append("cube.ancillary_variables differ") match = not bool(msgs) @@ -605,9 +594,7 @@ def build_indexes(positions): value_index_by_name = name_index_by_scalar[value] for other_name in names: if other_name != name: - value_index_by_name[other_name].add( - position[other_name] - ) + value_index_by_name[other_name].add(position[other_name]) else: name_index_by_scalar[value] = { other_name: set((position[other_name],)) @@ -860,9 +847,7 @@ def _derive_consistent_groups(relation_matrix, separable_group): result = [] for name in separable_group: - name_separable_group = ( - relation_matrix[name].separable & separable_group - ) + name_separable_group = relation_matrix[name].separable & separable_group candidate = list(name_separable_group) + [name] valid = True @@ -932,9 +917,7 @@ def _build_separable_group( for name in dependent: function_mapping = {} - valid = _is_dependent( - name, independent, positions, function_mapping - ) + valid = _is_dependent(name, independent, positions, function_mapping) if not valid: break @@ -1002,9 +985,7 @@ def _build_inseparable_group(space, group, positions, function_matrix): for name in dependent: function_mapping = {} - valid = _is_dependent( - name, independent, positions, function_mapping - ) + valid = _is_dependent(name, independent, positions, function_mapping) if not valid: break @@ -1126,22 +1107,16 @@ def derive_space(groups, relation_matrix, positions, function_matrix=None): # There is no relationship between any of the candidate # dimensions in the separable group, so merge them together # into a new combined dimension of the space. 
- _build_combination_group( - space, group, positions, function_matrix - ) + _build_combination_group(space, group, positions, function_matrix) else: # Determine whether there is a scalar relationship between one of # the candidate dimensions and each of the other candidate # dimensions in this inseparable group. - if not _build_inseparable_group( - space, group, positions, function_matrix - ): + if not _build_inseparable_group(space, group, positions, function_matrix): # There is no relationship between any of the candidate # dimensions in this inseparable group, so merge them together # into a new combined dimension of the space. - _build_combination_group( - space, group, positions, function_matrix - ) + _build_combination_group(space, group, positions, function_matrix) return space @@ -1455,9 +1430,7 @@ def axis_and_name(name): cells = OrderedDict( ( tuple( - position[ - int(member) if member.isdigit() else member - ] + position[int(member) if member.isdigit() else member] for member in members ), None, @@ -1529,10 +1502,7 @@ def name_in_independents(): if name_independents is not None: # Calculate the auxiliary coordinate shape. dims = tuple( - [ - dim_by_name[independent] - for independent in name_independents - ] + [dim_by_name[independent] for independent in name_independents] ) aux_shape = [self._shape[dim] for dim in dims] # Create empty points and bounds in preparation to be filled. @@ -1552,9 +1522,7 @@ def name_in_independents(): # dimension coordinate/s. 
index = [] - name_function_pairs = zip( - name_independents, function_independents - ) + name_function_pairs = zip(name_independents, function_independents) for independent, independent_value in name_function_pairs: cache = self._cache_by_name[independent] index.append(cache[independent_value]) @@ -1567,9 +1535,7 @@ def name_in_independents(): bounds[index] = name_value.bound kwargs = dict(zip(CoordMetadata._fields, defns[name])) - self._aux_templates.append( - _Template(dims, points, bounds, kwargs) - ) + self._aux_templates.append(_Template(dims, points, bounds, kwargs)) # Calculate the dimension mapping for each vector within the space. offset = len(self._shape) @@ -1597,8 +1563,7 @@ def _get_cube(self, data): (deepcopy(coord), dim) for coord, dim in self._dim_coords_and_dims ] aux_coords_and_dims = [ - (deepcopy(coord), dims) - for coord, dims in self._aux_coords_and_dims + (deepcopy(coord), dims) for coord, dims in self._aux_coords_and_dims ] kwargs = dict(zip(CubeMetadata._fields, signature.defn)) @@ -1606,8 +1571,7 @@ def _get_cube(self, data): (deepcopy(cm), dims) for cm, dims in self._cell_measures_and_dims ] avs_and_dims = [ - (deepcopy(av), dims) - for av, dims in self._ancillary_variables_and_dims + (deepcopy(av), dims) for av, dims in self._ancillary_variables_and_dims ] cube = iris.cube.Cube( data, @@ -1698,18 +1662,11 @@ def _build_coordinates(self): coord = iris.coords.DimCoord( template.points, bounds=template.bounds, **template.kwargs ) - if ( - len(template.dims) == 1 - and template.dims[0] not in covered_dims - ): - dim_coords_and_dims.append( - _CoordAndDims(coord, template.dims) - ) + if len(template.dims) == 1 and template.dims[0] not in covered_dims: + dim_coords_and_dims.append(_CoordAndDims(coord, template.dims)) covered_dims.append(template.dims[0]) else: - aux_coords_and_dims.append( - _CoordAndDims(coord, template.dims) - ) + aux_coords_and_dims.append(_CoordAndDims(coord, template.dims)) except ValueError: # kwarg not applicable to 
AuxCoord. template.kwargs.pop("circular", None) @@ -1791,9 +1748,7 @@ def _extract_coord_payload(self, cube): # Copes with duplicate hint entries, where the most preferred is king. hint_dict = { name: i - for i, name in zip( - range(len(self._hints), 0, -1), self._hints[::-1] - ) + for i, name in zip(range(len(self._hints), 0, -1), self._hints[::-1]) } # Coordinate axis ordering dictionary. axis_dict = {"T": 0, "Z": 1, "Y": 2, "X": 3} @@ -1808,9 +1763,7 @@ def key_func(coord): not np.issubdtype(points_dtype, np.number), not isinstance(coord, iris.coords.DimCoord), hint_dict.get(coord.name(), len(hint_dict) + 1), - axis_dict.get( - iris.util.guess_coord_axis(coord), len(axis_dict) + 1 - ), + axis_dict.get(iris.util.guess_coord_axis(coord), len(axis_dict) + 1), coord.metadata, ) @@ -1841,18 +1794,12 @@ def key_func(coord): else: # Extract the vector coordinate and metadata. if id(coord) in cube_aux_coord_ids: - vector_aux_coords_and_dims.append( - _CoordAndDims(coord, dims) - ) + vector_aux_coords_and_dims.append(_CoordAndDims(coord, dims)) else: - vector_dim_coords_and_dims.append( - _CoordAndDims(coord, dims) - ) + vector_dim_coords_and_dims.append(_CoordAndDims(coord, dims)) factory_defns = [] - for factory in sorted( - cube.aux_factories, key=lambda factory: factory.metadata - ): + for factory in sorted(cube.aux_factories, key=lambda factory: factory.metadata): dependency_defns = [] dependencies = factory.dependencies for key in sorted(dependencies): @@ -1862,9 +1809,7 @@ def key_func(coord): factory_defn = _FactoryDefn(type(factory), dependency_defns) factory_defns.append(factory_defn) - scalar = _ScalarCoordPayload( - scalar_defns, scalar_values, scalar_metadata - ) + scalar = _ScalarCoordPayload(scalar_defns, scalar_values, scalar_metadata) vector = _VectorCoordPayload( vector_dim_coords_and_dims, vector_aux_coords_and_dims ) diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 9239c96949..3044d072f9 100644 
--- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -67,10 +67,7 @@ def add_row(self, cols, aligns, i_col_unlimited=None): """ n_cols = len(cols) if len(aligns) != n_cols: - msg = ( - f"Number of aligns ({len(aligns)})" - f" != number of cols ({n_cols})" - ) + msg = f"Number of aligns ({len(aligns)})" f" != number of cols ({n_cols})" raise ValueError(msg) if self.n_columns is not None: # For now, all rows must have same number of columns @@ -104,18 +101,13 @@ def formatted_as_strings(self): result_lines = [] for row in self.rows: col_texts = [] - for col, align, width in zip( - row.cols, row.aligns, self.col_widths - ): + for col, align, width in zip(row.cols, row.aligns, self.col_widths): if align == "left": col_text = col.ljust(width) elif align == "right": col_text = col.rjust(width) else: - msg = ( - f'Unknown alignment "{align}" ' - 'not in ("left", "right")' - ) + msg = f'Unknown alignment "{align}" ' 'not in ("left", "right")' raise ValueError(msg) col_texts.append(col_text) diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 1094588fa6..58730af2b5 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -22,9 +22,7 @@ def __init__(self, cube): self.scalar = False self.dim_names = [] for dim in range(len(cube.shape)): - dim_coords = cube.coords( - contains_dimension=dim, dim_coords=True - ) + dim_coords = cube.coords(contains_dimension=dim, dim_coords=True) if dim_coords: self.dim_names.append(dim_coords[0].name()) else: @@ -40,9 +38,7 @@ class FullHeader: def __init__(self, cube, name_padding=35): self.name = cube.name() self.unit = cube.units - self.nameunit = "{name} / ({units})".format( - name=self.name, units=self.unit - ) + self.nameunit = "{name} / ({units})".format(name=self.name, units=self.unit) self.name_padding = name_padding self.dimension_header = DimensionHeader(cube) @@ -138,9 +134,7 @@ class 
VectorSummary(CoordSummary): def __init__(self, cube, vector, iscoord): self.name = iris.util.clip_string(vector.name()) dims = vector.cube_dims(cube) - self.dim_chars = [ - "x" if dim in dims else "-" for dim in range(len(cube.shape)) - ] + self.dim_chars = ["x" if dim in dims else "-" for dim in range(len(cube.shape))] if iscoord: extra = self._summary_coord_extra(cube, vector) self.extra = iris.util.clip_string(extra) @@ -199,17 +193,13 @@ def is_empty(self): class VectorSection(Section): def __init__(self, title, cube, vectors, iscoord): self.title = title - self.contents = [ - VectorSummary(cube, vector, iscoord) for vector in vectors - ] + self.contents = [VectorSummary(cube, vector, iscoord) for vector in vectors] class ScalarCoordSection(Section): def __init__(self, title, cube, scalars): self.title = title - self.contents = [ - ScalarCoordSummary(cube, scalar) for scalar in scalars - ] + self.contents = [ScalarCoordSummary(cube, scalar) for scalar in scalars] class ScalarCellMeasureSection(Section): @@ -302,9 +292,7 @@ def __init__(self, cube, name_padding=35): if cube.mesh is None: mesh_coords = [] else: - mesh_coords = [ - coord for coord in aux_coords if hasattr(coord, "mesh") - ] + mesh_coords = [coord for coord in aux_coords if hasattr(coord, "mesh")] vector_aux_coords = [ coord @@ -312,9 +300,7 @@ def __init__(self, cube, name_padding=35): if (id(coord) not in scalar_coord_ids and coord not in mesh_coords) ] vector_derived_coords = [ - coord - for coord in derived_coords - if id(coord) not in scalar_coord_ids + coord for coord in derived_coords if id(coord) not in scalar_coord_ids ] # Ancillary Variables @@ -338,12 +324,8 @@ def __init__(self, cube, name_padding=35): # Sort scalar coordinates by name. scalar_coords.sort(key=lambda coord: coord.name()) # Sort vector coordinates by data dimension and name. 
- vector_dim_coords.sort( - key=lambda coord: (cube.coord_dims(coord), coord.name()) - ) - vector_aux_coords.sort( - key=lambda coord: (cube.coord_dims(coord), coord.name()) - ) + vector_dim_coords.sort(key=lambda coord: (cube.coord_dims(coord), coord.name())) + vector_aux_coords.sort(key=lambda coord: (cube.coord_dims(coord), coord.name())) vector_derived_coords.sort( key=lambda coord: (cube.coord_dims(coord), coord.name()) ) @@ -351,18 +333,14 @@ def __init__(self, cube, name_padding=35): self.vector_sections = {} def add_vector_section(title, contents, iscoord=True): - self.vector_sections[title] = VectorSection( - title, cube, contents, iscoord - ) + self.vector_sections[title] = VectorSection(title, cube, contents, iscoord) add_vector_section("Dimension coordinates:", vector_dim_coords) add_vector_section("Mesh coordinates:", mesh_coords) add_vector_section("Auxiliary coordinates:", vector_aux_coords) add_vector_section("Derived coordinates:", vector_derived_coords) add_vector_section("Cell measures:", vector_cell_measures, False) - add_vector_section( - "Ancillary variables:", vector_ancillary_variables, False - ) + add_vector_section("Ancillary variables:", vector_ancillary_variables, False) self.scalar_sections = {} @@ -384,7 +362,5 @@ def add_scalar_section(section_class, title, *args): "Scalar ancillary variables:", scalar_ancillary_variables, ) - add_scalar_section( - CellMethodSection, "Cell methods:", cube.cell_methods - ) + add_scalar_section(CellMethodSection, "Cell methods:", cube.cell_methods) add_scalar_section(AttributeSection, "Attributes:", cube.attributes) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 76dd52de6e..2d5706d4f5 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -54,10 +54,7 @@ import iris._lazy_data from iris.analysis._area_weighted import AreaWeightedRegridder -from iris.analysis._interpolation import ( - EXTRAPOLATION_MODES, - RectilinearInterpolator, -) +from 
iris.analysis._interpolation import EXTRAPOLATION_MODES, RectilinearInterpolator from iris.analysis._regrid import CurvilinearRegridder, RectilinearRegridder import iris.coords from iris.coords import _DimensionalMetadata @@ -130,10 +127,7 @@ def __repr__(self): return ( "[" + ", ".join( - [ - coord.name() if coord is not None else "None" - for coord in self - ] + [coord.name() if coord is not None else "None" for coord in self] ) + "]" ) @@ -422,21 +416,17 @@ def no_data_dim_fn(cube, coord): result["grouped_coords"] - result["non_equal_data_dimension"] ) result["equal"] = result["grouped_coords"] - result["not_equal"] - result["dimensioned"] = ( - result["grouped_coords"] - result["no_data_dimension"] - ) + result["dimensioned"] = result["grouped_coords"] - result["no_data_dimension"] result["ungroupable_and_dimensioned"] = ( result["ungroupable"] & result["dimensioned"] ) - result["ignorable"] = ( - result["not_equal"] | result["ungroupable"] - ) & result["no_data_dimension"] + result["ignorable"] = (result["not_equal"] | result["ungroupable"]) & result[ + "no_data_dimension" + ] result["resamplable"] = ( result["not_equal"] & result["equal_data_dimension"] - result["scalar"] ) - result["transposable"] = ( - result["equal"] & result["non_equal_data_dimension"] - ) + result["transposable"] = result["equal"] & result["non_equal_data_dimension"] # for convenience, turn all of the sets in the dictionary into lists, # sorted by the name of the group @@ -597,11 +587,7 @@ def aggregate(self, data, axis, **kwargs): mdtol = kwargs.pop("mdtol", None) result = self.call_func(data, axis=axis, **kwargs) - if ( - mdtol is not None - and ma.is_masked(data) - and result is not ma.masked - ): + if mdtol is not None and ma.is_masked(data) and result is not ma.masked: fraction_not_missing = data.count(axis=axis) / data.shape[axis] mask_update = np.array(1 - mdtol > fraction_not_missing) if np.array(result).ndim > mask_update.ndim: @@ -878,9 +864,7 @@ def post_process(self, 
collapsed_cube, data_result, coords, **kwargs): # order cube. for point in points: cube = collapsed_cube.copy() - coord = iris.coords.AuxCoord( - point, long_name=coord_name, units="percent" - ) + coord = iris.coords.AuxCoord(point, long_name=coord_name, units="percent") cube.add_aux_coord(coord) cubes.append(cube) @@ -1087,7 +1071,7 @@ def update_metadata(self, cube, coords, **kwargs): for coord in coords: if not isinstance(coord, iris.coords.Coord): raise TypeError( - "Coordinate instance expected to the " "Aggregator object." + "Coordinate instance expected to the Aggregator object." ) coord_names.append(coord.name()) @@ -1362,9 +1346,7 @@ def inner_stat(array, axis=-1, mdtol=None, **kwargs): dask_result.shape, ) # Return an mdtol-masked version of the basic result. - result = da.ma.masked_array( - da.ma.getdata(dask_result), boolean_mask - ) + result = da.ma.masked_array(da.ma.getdata(dask_result), boolean_mask) return result return inner_stat @@ -1431,9 +1413,7 @@ def _calc_percentile(data, percent, fast_percentile_method=False, **kwargs): quantiles = percent / 100.0 for key in ["alphap", "betap"]: kwargs.setdefault(key, 1) - result = scipy.stats.mstats.mquantiles( - data, quantiles, axis=-1, **kwargs - ) + result = scipy.stats.mstats.mquantiles(data, quantiles, axis=-1, **kwargs) if not ma.isMaskedArray(data) and not ma.is_masked(result): return np.asarray(result) else: @@ -1536,9 +1516,7 @@ def _weighted_quantile_1D(data, weights, quantiles, **kwargs): return result -def _weighted_percentile( - data, axis, weights, percent, returned=False, **kwargs -): +def _weighted_percentile(data, axis, weights, percent, returned=False, **kwargs): """ The weighted_percentile aggregator is an additive operation. This means that it *may* introduce a new dimension to the data for the statistic being @@ -1668,9 +1646,7 @@ def _lazy_max_run(array, axis=-1, **kwargs): emsg = "function must be a callable. Got {}." 
raise TypeError(emsg.format(type(func))) bool_array = da.ma.getdata(func(array)) - bool_array = da.logical_and( - bool_array, da.logical_not(da.ma.getmaskarray(array)) - ) + bool_array = da.logical_and(bool_array, da.logical_not(da.ma.getmaskarray(array))) padding = [(0, 0)] * array.ndim padding[axis] = (0, 1) ones_zeros = da.pad(bool_array, padding).astype(int) @@ -1733,13 +1709,9 @@ def _sum(array, **kwargs): weights = al.ones_like(array) if al is da: # Dask version of ones_like does not preserve masks. See dask#9301. - weights = da.ma.masked_array( - weights, da.ma.getmaskarray(array) - ) + weights = da.ma.masked_array(weights, da.ma.getmaskarray(array)) else: - weights = al.ma.masked_array( - weights_in, mask=al.ma.getmaskarray(array) - ) + weights = al.ma.masked_array(weights_in, mask=al.ma.getmaskarray(array)) rvalue = (wsum, np.sum(weights, axis=axis_in)) else: rvalue = wsum @@ -1777,9 +1749,7 @@ def column_segments(column): if index != nan_index: columns.append(column[:nan_index]) elif nan_indices[index - 1] != (nan_index - 1): - columns.append( - column[nan_indices[index - 1] + 1 : nan_index] - ) + columns.append(column[nan_indices[index - 1] + 1 : nan_index]) if nan_indices[-1] != len(column) - 1: columns.append(column[nan_indices[-1] + 1 :]) return columns @@ -1818,8 +1788,7 @@ def interp_order(length): # Check if the column slice contains a single value, nans only, # masked values only or if the values are all equal. 
equal_slice = ( - np.ones(column_slice.size, dtype=column_slice.dtype) - * column_slice[0] + np.ones(column_slice.size, dtype=column_slice.dtype) * column_slice[0] ) if ( column_slice.size == 1 @@ -2046,9 +2015,7 @@ def interp_order(length): """ -MIN = Aggregator( - "minimum", ma.min, lazy_func=_build_dask_mdtol_function(da.min) -) +MIN = Aggregator("minimum", ma.min, lazy_func=_build_dask_mdtol_function(da.min)) """ An :class:`~iris.analysis.Aggregator` instance that calculates the minimum over a :class:`~iris.cube.Cube`, as computed by @@ -2065,9 +2032,7 @@ def interp_order(length): """ -MAX = Aggregator( - "maximum", ma.max, lazy_func=_build_dask_mdtol_function(da.max) -) +MAX = Aggregator("maximum", ma.max, lazy_func=_build_dask_mdtol_function(da.max)) """ An :class:`~iris.analysis.Aggregator` instance that calculates the maximum over a :class:`~iris.cube.Cube`, as computed by @@ -2426,9 +2391,7 @@ def __init__( self._stop = None # Ensure group-by coordinates are iterable. if not isinstance(groupby_coords, Iterable): - raise TypeError( - "groupby_coords must be a `collections.Iterable` type." - ) + raise TypeError("groupby_coords must be a `collections.Iterable` type.") # Add valid group-by coordinates. for coord in groupby_coords: @@ -2438,9 +2401,7 @@ def __init__( if shared_coords is not None: # Ensure shared coordinates are iterable. if not isinstance(shared_coords, Iterable): - raise TypeError( - "shared_coords must be a `collections.Iterable` type." - ) + raise TypeError("shared_coords must be a `collections.Iterable` type.") # Add valid shared coordinates. 
for coord, dim in shared_coords: self._add_shared_coord(coord, dim) @@ -2494,9 +2455,7 @@ def group_iterator(points): groups = [group_iterator(c.points) for c in self._groupby_coords] groupby_slices = [next(group) for group in groups] - indices_by_key: dict[ - tuple[Union[Number, str], ...], list[int] - ] = {} + indices_by_key: dict[tuple[Union[Number, str], ...], list[int]] = {} while any(s is not None for s in groupby_slices): # Determine the extent (start, stop) of the group given # each current group-by coordinate group. @@ -2504,9 +2463,7 @@ def group_iterator(points): stop = min(s.stop for s in groupby_slices if s is not None) # Construct composite group key for the group using the # start value from each group-by coordinate. - key = tuple( - coord.points[start] for coord in self._groupby_coords - ) + key = tuple(coord.points[start] for coord in self._groupby_coords) # Associate group slice with group key within the ordered # dictionary. indices_by_key.setdefault(key, []).extend(range(start, stop)) @@ -2613,8 +2570,7 @@ def _compute_shared_coords(self) -> None: new_bounds_list.append( [ first_choices.take(start, dim), - first_choices.take(0, dim) - + coord.units.modulus, + first_choices.take(0, dim) + coord.units.modulus, ] ) else: @@ -2638,9 +2594,7 @@ def _compute_shared_coords(self) -> None: # Bounds needs to be an array with the length 2 start-stop # dimension last, and the aggregated dimension back in its # original position. - new_bounds = np.moveaxis( - np.array(new_bounds_list), (0, 1), (dim, -1) - ) + new_bounds = np.moveaxis(np.array(new_bounds_list), (0, 1), (dim, -1)) # Now create the new bounded group shared coordinate. try: @@ -2912,9 +2866,7 @@ def regridder(self, src_grid_cube, target_grid_cube): that is to be regridded to the grid of `target_grid_cube`. 
""" - return AreaWeightedRegridder( - src_grid_cube, target_grid_cube, mdtol=self.mdtol - ) + return AreaWeightedRegridder(src_grid_cube, target_grid_cube, mdtol=self.mdtol) class Nearest: @@ -2998,9 +2950,7 @@ def interpolator(self, cube, coords): `[new_lat_values, new_lon_values]`. """ - return RectilinearInterpolator( - cube, coords, "nearest", self.extrapolation_mode - ) + return RectilinearInterpolator(cube, coords, "nearest", self.extrapolation_mode) def regridder(self, src_grid, target_grid): """ @@ -3118,9 +3068,7 @@ def regridder(self, src_cube, target_grid): that is to be regridded to the `target_grid`. """ - from iris.analysis.trajectory import ( - UnstructuredNearestNeigbourRegridder, - ) + from iris.analysis.trajectory import UnstructuredNearestNeigbourRegridder return UnstructuredNearestNeigbourRegridder(src_cube, target_grid) diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index bd2ad90a3a..3916088e33 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -173,8 +173,7 @@ def _get_xy_coords(cube): ] if len(x_coords) != 1: raise ValueError( - "Cube {!r} must contain a single 1D x " - "coordinate.".format(cube.name()) + "Cube {!r} must contain a single 1D x coordinate.".format(cube.name()) ) x_coord = x_coords[0] @@ -190,8 +189,7 @@ def _get_xy_coords(cube): ] if len(y_coords) != 1: raise ValueError( - "Cube {!r} must contain a single 1D y " - "coordinate.".format(cube.name()) + "Cube {!r} must contain a single 1D y coordinate.".format(cube.name()) ) y_coord = y_coords[0] @@ -216,8 +214,7 @@ def _get_xy_coords(cube): if x_dim is not None and y_dim == x_dim: raise ValueError( - "The cube's x and y coords must not describe the " - "same data dimension." + "The cube's x and y coords must not describe the same data dimension." 
) return x_coord, y_coord @@ -231,14 +228,12 @@ def _get_bounds_in_units(coord, units, dtype): """ # The bounds are cast to dtype before conversion to prevent issues when # mixing float32 and float64 types. - return coord.units.convert( - coord.contiguous_bounds().astype(dtype), units - ).astype(dtype) + return coord.units.convert(coord.contiguous_bounds().astype(dtype), units).astype( + dtype + ) -def _regrid_area_weighted_rectilinear_src_and_grid__prepare( - src_cube, grid_cube -): +def _regrid_area_weighted_rectilinear_src_and_grid__prepare(src_cube, grid_cube): """ First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. @@ -355,9 +350,7 @@ def _calculate_regrid_area_weighted_weights( src_x_bounds, grid_x_bounds, circular=spherical, mod=modulus ) y_info = _get_coord_to_coord_matrix_info(src_y_bounds, grid_y_bounds) - weights_matrix = _combine_xy_weights( - x_info, y_info, src_shape, tgt_shape - ) + weights_matrix = _combine_xy_weights(x_info, y_info, src_shape, tgt_shape) return weights_matrix weights = _calculate_regrid_area_weighted_weights( @@ -460,9 +453,7 @@ def regrid_callback(*args, **kwargs): return new_cube -def _get_coord_to_coord_matrix_info( - src_bounds, tgt_bounds, circular=False, mod=None -): +def _get_coord_to_coord_matrix_info(src_bounds, tgt_bounds, circular=False, mod=None): """ First part of weight calculation. @@ -671,9 +662,7 @@ def _standard_regrid(data, weights, tgt_shape, mdtol): if ma.isMaskedArray(data): # If the source is masked, the result should have a similar mask. fill_value = data.fill_value - normalisations = ma.array( - normalisations, mask=~tgt_mask, fill_value=fill_value - ) + normalisations = ma.array(normalisations, mask=~tgt_mask, fill_value=fill_value) elif np.any(~tgt_mask): normalisations = ma.array(normalisations, mask=~tgt_mask) @@ -682,9 +671,7 @@ def _standard_regrid(data, weights, tgt_shape, mdtol): dtype = np.promote_types(data.dtype, np.float16) # Perform regridding on unmasked data. 
- result = _standard_regrid_no_masks( - ma.filled(data, 0.0), weights, tgt_shape - ) + result = _standard_regrid_no_masks(ma.filled(data, 0.0), weights, tgt_shape) # Apply normalisations and masks to the regridded data. result = result * normalisations result = result.astype(dtype) diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 86a0c38086..ae2fb433c1 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -331,9 +331,7 @@ def transform_xy_arrays(x, y): rhs = np.roll(mid, -1, 2) if not x_coord: # Create coords for result cube : with no bounds. - y_coord = iris.coords.AuxCoord( - x, standard_name="latitude", units="degrees" - ) + y_coord = iris.coords.AuxCoord(x, standard_name="latitude", units="degrees") x_coord = iris.coords.AuxCoord( y, standard_name="longitude", units="degrees" ) @@ -355,14 +353,9 @@ def transform_xy_arrays(x, y): lhs_xyz = 0.5 * (xyz[..., 0] + xyz[..., 3]) rhs_xyz = 0.5 * (xyz[..., 1] + xyz[..., 2]) else: - msg = ( - 'unrecognised cell_angle_boundpoints of "{}", ' - "must be one of {}" - ) + msg = 'unrecognised cell_angle_boundpoints of "{}", ' "must be one of {}" raise ValueError( - msg.format( - cell_angle_boundpoints, list(angle_boundpoints_vals.keys()) - ) + msg.format(cell_angle_boundpoints, list(angle_boundpoints_vals.keys())) ) if not x_coord: # Create bounded coords for result cube. @@ -400,9 +393,7 @@ def transform_xy_arrays(x, y): return result -def rotate_grid_vectors( - u_cube, v_cube, grid_angles_cube=None, grid_angles_kwargs=None -): +def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwargs=None): """ Rotate distance vectors from grid-oriented to true-latlon-oriented. 
diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 091d29d7e2..f0d31e4361 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -85,9 +85,7 @@ def extend_circular_data(data, coord_dim): coord_slice_in_cube[coord_dim] = slice(0, 1) mod = ma if ma.isMaskedArray(data) else np - data = mod.concatenate( - (data, data[tuple(coord_slice_in_cube)]), axis=coord_dim - ) + data = mod.concatenate((data, data[tuple(coord_slice_in_cube)]), axis=coord_dim) return data @@ -139,16 +137,14 @@ def get_xy_coords(cube, dim_coords=False): x_coords = cube.coords(axis="x", dim_coords=dim_coords) if len(x_coords) != 1 or x_coords[0].ndim != 1: raise ValueError( - "Cube {!r} must contain a single 1D x " - "coordinate.".format(cube.name()) + "Cube {!r} must contain a single 1D x coordinate.".format(cube.name()) ) x_coord = x_coords[0] y_coords = cube.coords(axis="y", dim_coords=dim_coords) if len(y_coords) != 1 or y_coords[0].ndim != 1: raise ValueError( - "Cube {!r} must contain a single 1D y " - "coordinate.".format(cube.name()) + "Cube {!r} must contain a single 1D y coordinate.".format(cube.name()) ) y_coord = y_coords[0] @@ -284,9 +280,7 @@ def _account_for_circular(self, points, data): def _account_for_inverted(self, data): if np.any(self._coord_decreasing): dim_slices = [slice(None)] * data.ndim - for interp_dim, flip in zip( - self._interp_dims, self._coord_decreasing - ): + for interp_dim, flip in zip(self._interp_dims, self._coord_decreasing): if flip: dim_slices[interp_dim] = slice(-1, None, -1) data = data[tuple(dim_slices)] @@ -435,9 +429,7 @@ def _setup(self): # Only DimCoords can be circular. 
if circular: coord_points = extend_circular_coord(coord, coord_points) - offset = 0.5 * ( - coord_points.max() + coord_points.min() - modulus - ) + offset = 0.5 * (coord_points.max() + coord_points.min() - modulus) self._circulars.append( (circular, modulus, index, coord_dims[0], offset) ) @@ -466,17 +458,13 @@ def _validate(self): for coord in self._src_coords: if coord.ndim != 1: raise ValueError( - "Interpolation coords must be 1-d for " - "rectilinear interpolation." + "Interpolation coords must be 1-d for rectilinear interpolation." ) if not isinstance(coord, DimCoord): # Check monotonic. if not iris.util.monotonic(coord.points, strict=True): - msg = ( - "Cannot interpolate over the non-" - "monotonic coordinate {}." - ) + msg = "Cannot interpolate over the non-monotonic coordinate {}." raise ValueError(msg.format(coord.name())) def _interpolated_dtype(self, dtype): @@ -541,9 +529,7 @@ def _points(self, sample_points, data, data_dims=None): for dim in range(self._src_cube.ndim): if dim not in data_dims: strides.insert(dim, 0) - data = as_strided( - data, strides=strides, shape=self._src_cube.shape - ) + data = as_strided(data, strides=strides, shape=self._src_cube.shape) data = self._account_for_inverted(data) # Calculate the transpose order to shuffle the interpolated dimensions @@ -553,9 +539,7 @@ def _points(self, sample_points, data, data_dims=None): di = self._interp_dims ds = sorted(dims, key=lambda d: d not in di) dmap = {d: di.index(d) if d in di else ds.index(d) for d in dims} - interp_order, _ = zip( - *sorted(dmap.items(), key=operator.itemgetter(1)) - ) + interp_order, _ = zip(*sorted(dmap.items(), key=operator.itemgetter(1))) _, src_order = zip(*sorted(dmap.items(), key=operator.itemgetter(0))) # Prepare the sample points for interpolation and calculate the @@ -619,13 +603,9 @@ def __call__(self, sample_points, collapse_scalar=True): """ if len(sample_points) != len(self._src_coords): msg = "Expected sample points for {} coordinates, got {}." 
- raise ValueError( - msg.format(len(self._src_coords), len(sample_points)) - ) + raise ValueError(msg.format(len(self._src_coords), len(sample_points))) - sample_points = _canonical_sample_points( - self._src_coords, sample_points - ) + sample_points = _canonical_sample_points(self._src_coords, sample_points) data = self._src_cube.data # Interpolate the cube payload. @@ -670,9 +650,7 @@ def construct_new_coord(coord): else: if set(dims).intersection(set(self._interp_dims)): # Interpolate the coordinate payload. - new_coord = self._resample_coord( - sample_points, coord, dims - ) + new_coord = self._resample_coord(sample_points, coord, dims) else: new_coord = coord.copy() return new_coord, dims diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 113c21e6e3..9953fc4cb4 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -44,9 +44,7 @@ def _transform_xy_arrays(crs_from, x, y, crs_to): return pts[..., 0], pts[..., 1] -def _regrid_weighted_curvilinear_to_rectilinear__prepare( - src_cube, weights, grid_cube -): +def _regrid_weighted_curvilinear_to_rectilinear__prepare(src_cube, weights, grid_cube): """ First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'. @@ -87,10 +85,7 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare( raise ValueError(msg.format(sx.name(), sy.name())) if sx.coord_system is None: - msg = ( - "The source X and Y coordinates must have a defined " - "coordinate system." - ) + msg = "The source X and Y coordinates must have a defined coordinate system." raise ValueError(msg) if tx.units != ty.units: @@ -101,10 +96,7 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare( raise ValueError(msg.format(tx.name(), ty.name())) if tx.coord_system is None: - msg = ( - "The target X and Y coordinates must have a defined " - "coordinate system." - ) + msg = "The target X and Y coordinates must have a defined coordinate system." 
raise ValueError(msg) if tx.coord_system != ty.coord_system: @@ -117,24 +109,15 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare( if weights is None: weights = np.ones(sx.shape) if weights.shape != sx.shape: - msg = ( - "Provided weights must have the same shape as the X and Y " - "coordinates." - ) + msg = "Provided weights must have the same shape as the X and Y coordinates." raise ValueError(msg) if not tx.has_bounds() or not tx.is_contiguous(): - msg = ( - "The target grid cube x ({!r})coordinate requires " - "contiguous bounds." - ) + msg = "The target grid cube x ({!r})coordinate requires contiguous bounds." raise ValueError(msg.format(tx.name())) if not ty.has_bounds() or not ty.is_contiguous(): - msg = ( - "The target grid cube y ({!r}) coordinate requires " - "contiguous bounds." - ) + msg = "The target grid cube y ({!r}) coordinate requires contiguous bounds." raise ValueError(msg.format(ty.name())) def _src_align_and_flatten(coord): @@ -329,9 +312,7 @@ def _curvilinear_to_rectilinear_regrid_data( sum_weights = valid_src_cells @ sparse_matrix.T data = r_data if sum_weights is None: - sum_weights = ( - np.ones(data_shape).reshape(-1, grid_size) @ sparse_matrix.T - ) + sum_weights = np.ones(data_shape).reshape(-1, grid_size) @ sparse_matrix.T # Work out where output cells are missing all contributions. # This allows for where 'rows' contains output cells that have no # data because of missing input points. @@ -365,9 +346,7 @@ def _curvilinear_to_rectilinear_regrid_data( return result -def _regrid_weighted_curvilinear_to_rectilinear__perform( - src_cube, regrid_info -): +def _regrid_weighted_curvilinear_to_rectilinear__perform(src_cube, regrid_info): """ Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. @@ -502,10 +481,8 @@ def __call__(self, src): slice_cube = next(src.slices(sx)) if self._regrid_info is None: # Calculate the basic regrid info just once. 
- self._regrid_info = ( - _regrid_weighted_curvilinear_to_rectilinear__prepare( - slice_cube, self.weights, self._target_cube - ) + self._regrid_info = _regrid_weighted_curvilinear_to_rectilinear__prepare( + slice_cube, self.weights, self._target_cube ) result = _regrid_weighted_curvilinear_to_rectilinear__perform( src, self._regrid_info @@ -521,9 +498,7 @@ class RectilinearRegridder: """ - def __init__( - self, src_grid_cube, tgt_grid_cube, method, extrapolation_mode - ): + def __init__(self, src_grid_cube, tgt_grid_cube, method, extrapolation_mode): """ Create a regridder for conversions between the source and target grids. @@ -969,9 +944,7 @@ def __call__(self, src): extrapolation_mode=self._extrapolation_mode, ) - data = map_complete_blocks( - src, regrid, (y_dim, x_dim), sample_grid_x.shape - ) + data = map_complete_blocks(src, regrid, (y_dim, x_dim), sample_grid_x.shape) # Wrap up the data as a Cube. _regrid_callback = functools.partial( @@ -999,9 +972,7 @@ def regrid_callback(*args, **kwargs): return result -def _create_cube( - data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback -): +def _create_cube(data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback): r""" Return a new cube for the result of regridding. Returned cube represents the result of regridding the source cube @@ -1074,11 +1045,7 @@ def copy_coords(src_coords, add_method): def dim_offset(dim): offset = sum( - [ - d <= dim - for d in (grid_dim_x, grid_dim_y) - if d is not None - ] + [d <= dim for d in (grid_dim_x, grid_dim_y) if d is not None] ) if offset and num_tgt_dims == 1: offset -= 1 @@ -1103,8 +1070,7 @@ def regrid_reference_surface( # Determine which of the reference surface's dimensions span the X # and Y dimensions of the source cube. 
relative_surface_dims = [ - surface_dims.index(dim) if dim is not None else None - for dim in src_dims + surface_dims.index(dim) if dim is not None else None for dim in src_dims ] surface = regrid_callback( src_surface_coord.points, diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index bfa070c7c7..cceb1ba7ab 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -25,9 +25,7 @@ def _ndim_coords_from_arrays(points, ndim=None): p = np.broadcast_arrays(*points) for j in range(1, len(p)): if p[j].shape != p[0].shape: - raise ValueError( - "coordinate arrays do not have the same shape" - ) + raise ValueError("coordinate arrays do not have the same shape") points = np.empty(p[0].shape + (len(points),), dtype=float) for j, item in enumerate(p): points[..., j] = item @@ -123,9 +121,7 @@ def __init__( self.fill_value = fill_value if fill_value is not None: - if hasattr(values, "dtype") and not np.can_cast( - fill_value, values.dtype - ): + if hasattr(values, "dtype") and not np.can_cast(fill_value, values.dtype): raise ValueError( "fill_value must be either 'None' or " "of a type compatible with values" @@ -134,13 +130,10 @@ def __init__( for i, p in enumerate(points): if not np.all(np.diff(p) > 0.0): raise ValueError( - "The points in dimension %d must be strictly " - "ascending" % i + "The points in dimension %d must be strictly ascending" % i ) if not np.asarray(p).ndim == 1: - raise ValueError( - "The points in dimension %d must be " "1-dimensional" % i - ) + raise ValueError("The points in dimension %d must be 1-dimensional" % i) if not values.shape[i] == len(p): raise ValueError( "There are %d points and %d values in " @@ -240,18 +233,13 @@ def compute_interp_weights(self, xi, method=None): ) corners = itertools.product( - *[ - [(i, 1 - n), (i + 1, n)] - for i, n in zip(indices, norm_distances) - ] + *[[(i, 1 - n), (i + 1, n)] for i, n in zip(indices, norm_distances)] ) shape 
= self.values.shape[:ndim] for i, corner in enumerate(corners): corner_indices = [ci for ci, cw in corner] - n_indices = np.ravel_multi_index( - corner_indices, shape, mode="wrap" - ) + n_indices = np.ravel_multi_index(corner_indices, shape, mode="wrap") col_indices[i::n_src_values_per_result_value] = n_indices for ci, cw in corner: weights[i::n_src_values_per_result_value] *= cw @@ -300,9 +288,7 @@ def interp_using_pre_computed_weights(self, computed_weights): if method == "linear": result = self._evaluate_linear_sparse(indices) elif method == "nearest": - result = self._evaluate_nearest( - indices, norm_distances, out_of_bounds - ) + result = self._evaluate_nearest(indices, norm_distances, out_of_bounds) if not self.bounds_error and self.fill_value is not None: result[out_of_bounds] = self.fill_value diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index f312aa02a0..220999ca55 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -42,8 +42,7 @@ def _construct_delta_coord(coord): circular = getattr(coord, "circular", False) if coord.shape == (1,) and not circular: raise ValueError( - "Cannot take interval differences of a single " - "valued coordinate." + "Cannot take interval differences of a single valued coordinate." ) if circular: @@ -93,9 +92,7 @@ def _construct_midpoint_coord(coord, circular=None): if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) if coord.shape == (1,) and not circular: - raise ValueError( - "Cannot take the midpoints of a single valued " "coordinate." - ) + raise ValueError("Cannot take the midpoints of a single valued coordinate.") # Calculate the delta of the coordinate # (this deals with circularity nicely). 
@@ -201,9 +198,7 @@ def cube_delta(cube, coord): ) ) - delta_cube.rename( - "change_in_{}_wrt_{}".format(delta_cube.name(), coord.name()) - ) + delta_cube.rename("change_in_{}_wrt_{}".format(delta_cube.name(), coord.name())) return delta_cube @@ -282,9 +277,7 @@ def differentiate(cube, coord_to_differentiate): delta_cube = iris.analysis.maths.divide(delta_cube, delta_coord, delta_dim) # Update the standard name - delta_cube.rename( - "derivative_of_{}_wrt_{}".format(cube.name(), coord.name()) - ) + delta_cube.rename("derivative_of_{}_wrt_{}".format(cube.name(), coord.name())) return delta_cube @@ -578,9 +571,7 @@ def curl(i_cube, j_cube, k_cube=None): if bad_coords: raise ValueError( "Some coordinates are different ({}), consider " - "resampling.".format( - ", ".join(group.name() for group in bad_coords) - ) + "resampling.".format(", ".join(group.name() for group in bad_coords)) ) # Get the dim_coord, or None if none exist, for the xyz dimensions @@ -672,9 +663,7 @@ def curl(i_cube, j_cube, k_cube=None): spherical = True if not spherical: - raise ValueError( - "Cannot take the curl over a non-spherical " "ellipsoid." 
- ) + raise ValueError("Cannot take the curl over a non-spherical ellipsoid.") lon_coord = x_coord.copy() lat_coord = y_coord.copy() @@ -722,9 +711,7 @@ def curl(i_cube, j_cube, k_cube=None): d_k_cube_dphi = _curl_differentiate(k_cube, lon_coord) d_k_cube_dphi = _curl_regrid(d_k_cube_dphi, prototype_diff) if d_k_cube_dphi is not None: - d_k_cube_dphi = iris.analysis.maths.divide( - d_k_cube_dphi, lat_cos_coord - ) + d_k_cube_dphi = iris.analysis.maths.divide(d_k_cube_dphi, lat_cos_coord) dri_dr = _curl_differentiate(r * i_cube, z_coord) if dri_dr is not None: dri_dr.units = dri_dr.units * r_unit @@ -789,10 +776,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): # Make a dictionary of {direction: phenomenon quantity} cube_directions, cube_phenomena = zip( - *[ - re.match(vector_qty, std_name).groups() - for std_name in cube_standard_names - ] + *[re.match(vector_qty, std_name).groups() for std_name in cube_standard_names] ) # Check that there is only one distinct phenomenon @@ -817,8 +801,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): # If we didn't get a match, raise an Exception if direction is None: direction_string = "; ".join( - ", ".join(possible_direction) - for possible_direction in directional_names + ", ".join(possible_direction) for possible_direction in directional_names ) raise ValueError( "{} are not recognised vector cube_directions. " diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index a760f5ab50..03de657601 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -112,13 +112,9 @@ def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): An array of unrotated longitudes and an array of unrotated latitudes. 
""" - src_proj = ccrs.RotatedGeodetic( - pole_longitude=pole_lon, pole_latitude=pole_lat - ) + src_proj = ccrs.RotatedGeodetic(pole_longitude=pole_lon, pole_latitude=pole_lat) target_proj = ccrs.Geodetic() - res = target_proj.transform_points( - x=rotated_lons, y=rotated_lats, src_crs=src_proj - ) + res = target_proj.transform_points(x=rotated_lons, y=rotated_lats, src_crs=src_proj) unrotated_lon = res[..., 0] unrotated_lat = res[..., 1] @@ -162,9 +158,7 @@ def rotate_pole(lons, lats, pole_lon, pole_lat): """ src_proj = ccrs.Geodetic() - target_proj = ccrs.RotatedGeodetic( - pole_longitude=pole_lon, pole_latitude=pole_lat - ) + target_proj = ccrs.RotatedGeodetic(pole_longitude=pole_lon, pole_latitude=pole_lat) res = target_proj.transform_points(x=lons, y=lats, src_crs=src_proj) rotated_lon = res[..., 0] rotated_lat = res[..., 1] @@ -174,16 +168,14 @@ def rotate_pole(lons, lats, pole_lon, pole_lat): def _get_lon_lat_coords(cube): def search_for_coord(coord_iterable, coord_name): - return [ - coord for coord in coord_iterable if coord_name in coord.name() - ] - - lat_coords = search_for_coord( - cube.dim_coords, "latitude" - ) or search_for_coord(cube.coords(), "latitude") - lon_coords = search_for_coord( - cube.dim_coords, "longitude" - ) or search_for_coord(cube.coords(), "longitude") + return [coord for coord in coord_iterable if coord_name in coord.name()] + + lat_coords = search_for_coord(cube.dim_coords, "latitude") or search_for_coord( + cube.coords(), "latitude" + ) + lon_coords = search_for_coord(cube.dim_coords, "longitude") or search_for_coord( + cube.coords(), "longitude" + ) if len(lat_coords) > 1 or len(lon_coords) > 1: raise ValueError( "Calling `_get_lon_lat_coords` with multiple same-type (i.e. 
dim/aux) lat or lon coords" @@ -216,9 +208,7 @@ def _xy_range(cube, mode=None): iris.coord_systems.RotatedGeogCS, ) if (cs is not None) and not isinstance(cs, cs_valid_types): - raise ValueError( - "Latlon coords cannot be found with {0}.".format(type(cs)) - ) + raise ValueError("Latlon coords cannot be found with {0}.".format(type(cs))) x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") cs = cube.coord_system("CoordSystem") @@ -231,9 +221,7 @@ def _xy_range(cube, mode=None): if x_coord.has_bounds(): if mode not in [iris.coords.POINT_MODE, iris.coords.BOUND_MODE]: - raise ValueError( - 'When the coordinate has bounds, please specify "mode".' - ) + raise ValueError('When the coordinate has bounds, please specify "mode".') _mode = mode else: _mode = iris.coords.POINT_MODE @@ -479,9 +467,7 @@ def area_weights(cube, normalize=False): if dim is not None: wshape.append(ll_weights.shape[idim]) ll_weights = ll_weights.reshape(wshape) - broad_weights = iris.util.broadcast_to_shape( - ll_weights, cube.shape, broadcast_dims - ) + broad_weights = iris.util.broadcast_to_shape(ll_weights, cube.shape, broadcast_dims) return broad_weights @@ -526,17 +512,14 @@ def cosine_latitude_weights(cube): See more at :doc:`/userguide/real_and_lazy_data`. """ # Find all latitude coordinates, we want one and only one. - lat_coords = [ - coord for coord in cube.coords() if "latitude" in coord.name() - ] + lat_coords = [coord for coord in cube.coords() if "latitude" in coord.name()] if len(lat_coords) > 1: raise ValueError("Multiple latitude coords are currently disallowed.") try: lat = lat_coords[0] except IndexError: raise ValueError( - "Cannot get latitude " - "coordinate from cube {!r}.".format(cube.name()) + "Cannot get latitude coordinate from cube {!r}.".format(cube.name()) ) # Get the dimension position(s) of the latitude coordinate. 
@@ -557,8 +540,7 @@ def cosine_latitude_weights(cube): lat.points > np.pi / 2.0 + threshold ): warnings.warn( - "Out of range latitude values will be " - "clipped to the valid range.", + "Out of range latitude values will be clipped to the valid range.", category=iris.exceptions.IrisDefaultingWarning, ) points = lat.points @@ -572,9 +554,7 @@ def cosine_latitude_weights(cube): if dim is not None: wshape.append(l_weights.shape[idim]) l_weights = l_weights.reshape(wshape) - broad_weights = iris.util.broadcast_to_shape( - l_weights, cube.shape, broadcast_dims - ) + broad_weights = iris.util.broadcast_to_shape(l_weights, cube.shape, broadcast_dims) return broad_weights @@ -711,8 +691,7 @@ def project(cube, target_proj, nx=None, ny=None): # Determine dimension mappings - expect either 1d or 2d if lat_coord.ndim != lon_coord.ndim: raise ValueError( - "The latitude and longitude coordinates have " - "different dimensionality." + "The latitude and longitude coordinates have different dimensionality." ) latlon_ndim = lat_coord.ndim @@ -974,9 +953,7 @@ def _crs_distance_differentials(crs, x, y): # Transform points to true-latlon (just to get the true latitudes). _, true_lat = _transform_xy(crs, x, y, crs_latlon) # Get coordinate differentials w.r.t. true-latlon. - dlon_dx, dlat_dx, dlon_dy, dlat_dy = _inter_crs_differentials( - crs, x, y, crs_latlon - ) + dlon_dx, dlat_dx, dlon_dy, dlat_dy = _inter_crs_differentials(crs, x, y, crs_latlon) # Calculate effective scalings of X and Y coordinates. 
lat_factor = np.cos(np.deg2rad(true_lat)) ** 2 ds_dx = np.sqrt(dlat_dx * dlat_dx + dlon_dx * dlon_dx * lat_factor) @@ -1017,9 +994,7 @@ def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): return u2_dist, v2_dist -def _transform_distance_vectors_tolerance_mask( - src_crs, x, y, tgt_crs, ds, dx2, dy2 -): +def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, dy2): """ Return a mask that can be applied to data array to mask elements where the magnitude of vectors are not preserved due to numerical @@ -1138,15 +1113,11 @@ def rotate_winds(u_cube, v_cube, target_cs): ) if u_cube.coord(axis="x") != v_cube.coord(axis="x"): raise ValueError( - msg.format( - u_cube.coord(axis="x").name(), v_cube.coord(axis="x").name() - ) + msg.format(u_cube.coord(axis="x").name(), v_cube.coord(axis="x").name()) ) if u_cube.coord(axis="y") != v_cube.coord(axis="y"): raise ValueError( - msg.format( - u_cube.coord(axis="y").name(), v_cube.coord(axis="y").name() - ) + msg.format(u_cube.coord(axis="y").name(), v_cube.coord(axis="y").name()) ) # Check x and y coords have the same coordinate system. @@ -1193,13 +1164,11 @@ def rotate_winds(u_cube, v_cube, target_cs): # Check the dimension mappings match between u_cube and v_cube. if u_cube.coord_dims(x_coord) != v_cube.coord_dims(x_coord): raise ValueError( - "Dimension mapping of x coordinate differs " - "between u and v cubes." + "Dimension mapping of x coordinate differs between u and v cubes." ) if u_cube.coord_dims(y_coord) != v_cube.coord_dims(y_coord): raise ValueError( - "Dimension mapping of y coordinate differs " - "between u and v cubes." + "Dimension mapping of y coordinate differs between u and v cubes." 
) x_dims = u_cube.coord_dims(x_coord) y_dims = u_cube.coord_dims(y_coord) diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index d7ed7f8840..719da18690 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -72,7 +72,7 @@ def _extract_relevant_cube_slice(cube, geometry): x_min_ix = x_min_ix[np.argmax(x_bounds_lower[x_min_ix])] except ValueError: warnings.warn( - "The geometry exceeds the cube's x dimension at the " "lower end.", + "The geometry exceeds the cube's x dimension at the lower end.", category=iris.exceptions.IrisGeometryExceedWarning, ) x_min_ix = 0 if x_ascending else x_coord.points.size - 1 @@ -82,7 +82,7 @@ def _extract_relevant_cube_slice(cube, geometry): x_max_ix = x_max_ix[np.argmin(x_bounds_upper[x_max_ix])] except ValueError: warnings.warn( - "The geometry exceeds the cube's x dimension at the " "upper end.", + "The geometry exceeds the cube's x dimension at the upper end.", category=iris.exceptions.IrisGeometryExceedWarning, ) x_max_ix = x_coord.points.size - 1 if x_ascending else 0 @@ -92,7 +92,7 @@ def _extract_relevant_cube_slice(cube, geometry): y_min_ix = y_min_ix[np.argmax(y_bounds_lower[y_min_ix])] except ValueError: warnings.warn( - "The geometry exceeds the cube's y dimension at the " "lower end.", + "The geometry exceeds the cube's y dimension at the lower end.", category=iris.exceptions.IrisGeometryExceedWarning, ) y_min_ix = 0 if y_ascending else y_coord.points.size - 1 @@ -102,7 +102,7 @@ def _extract_relevant_cube_slice(cube, geometry): y_max_ix = y_max_ix[np.argmin(y_bounds_upper[y_max_ix])] except ValueError: warnings.warn( - "The geometry exceeds the cube's y dimension at the " "upper end.", + "The geometry exceeds the cube's y dimension at the upper end.", category=iris.exceptions.IrisGeometryExceedWarning, ) y_max_ix = y_coord.points.size - 1 if y_ascending else 0 diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index d17d3ea93c..62db621ec3 100644 --- 
a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -68,9 +68,7 @@ def _output_dtype(op, first_dtype, second_dtype=None, in_place=False): result = first_dtype else: operand_dtypes = ( - (first_dtype, second_dtype) - if second_dtype is not None - else (first_dtype,) + (first_dtype, second_dtype) if second_dtype is not None else (first_dtype,) ) arrays = [np.array([1], dtype=dtype) for dtype in operand_dtypes] result = op(*arrays).dtype @@ -92,9 +90,7 @@ def _get_dtype(operand): An instance of :class:`numpy.dtype` """ - return ( - np.min_scalar_type(operand) if np.isscalar(operand) else operand.dtype - ) + return np.min_scalar_type(operand) if np.isscalar(operand) else operand.dtype def abs(cube, in_place=False): @@ -123,9 +119,7 @@ def abs(cube, in_place=False): _assert_is_cube(cube) new_dtype = _output_dtype(np.abs, cube.dtype, in_place=in_place) op = da.absolute if cube.has_lazy_data() else np.abs - return _math_op_common( - cube, op, cube.units, new_dtype=new_dtype, in_place=in_place - ) + return _math_op_common(cube, op, cube.units, new_dtype=new_dtype, in_place=in_place) def intersection_of_cubes(cube, other_cube): @@ -188,14 +182,11 @@ def intersection_of_cubes(cube, other_cube): if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) - coord_comp = iris.analysis._dimensional_metadata_comparison( - cube, other_cube - ) + coord_comp = iris.analysis._dimensional_metadata_comparison(cube, other_cube) if coord_comp["ungroupable_and_dimensioned"]: raise ValueError( - "Cubes do not share all coordinates in common, " - "cannot intersect." + "Cubes do not share all coordinates in common, cannot intersect." ) # cubes must have matching coordinates @@ -216,9 +207,7 @@ def _assert_is_cube(cube): from iris.cube import Cube if not isinstance(cube, Cube): - raise TypeError( - 'The "cube" argument must be an instance of ' "iris.cube.Cube." 
- ) + raise TypeError('The "cube" argument must be an instance of ' "iris.cube.Cube.") @_lenient_client(services=SERVICES) @@ -735,9 +724,7 @@ def log10(cube, in_place=False): ) -def apply_ufunc( - ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False -): +def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False): """ Apply a `numpy universal function `_ to a cube @@ -789,9 +776,7 @@ def apply_ufunc( """ if not isinstance(ufunc, np.ufunc): - ufunc_name = getattr( - ufunc, "__name__", "function passed to apply_ufunc" - ) + ufunc_name = getattr(ufunc, "__name__", "function passed to apply_ufunc") emsg = f"{ufunc_name} is not recognised, it is not an instance of numpy.ufunc" raise TypeError(emsg) @@ -934,9 +919,9 @@ def unary_func(lhs): elif ma.is_masked(rhs) and not isinstance(cube.data, ma.MaskedArray): cube.data = ma.array(cube.data) - elif isinstance( - cube.core_data(), ma.MaskedArray - ) and iris._lazy_data.is_lazy_data(rhs): + elif isinstance(cube.core_data(), ma.MaskedArray) and iris._lazy_data.is_lazy_data( + rhs + ): # Workaround for #2987. numpy#15200 discusses the general problem. cube = cube.copy(cube.lazy_data()) @@ -977,8 +962,7 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): except iris.exceptions.CoordinateNotFoundError: raise ValueError( "Could not determine dimension for %s. 
" - "Use %s(cube, coord, dim=dim)" - % (operation_name, operation_name) + "Use %s(cube, coord, dim=dim)" % (operation_name, operation_name) ) if other.ndim != 1: @@ -1161,10 +1145,7 @@ def ws_units_func(u_cube, v_cube): args = [ param for param in sig.parameters.values() - if ( - param.kind != param.KEYWORD_ONLY - and param.default is param.empty - ) + if (param.kind != param.KEYWORD_ONLY and param.default is param.empty) ] self.nin = len(args) diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 121d862adb..530be13391 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -139,40 +139,26 @@ def _ones_like(cube): ) dims_1_common = [ - i - for i in range(cube_1.ndim) - if dim_coords_1[i] in common_dim_coords + i for i in range(cube_1.ndim) if dim_coords_1[i] in common_dim_coords ] weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common) if cube_2.shape != smaller_shape: dims_2_common = [ - i - for i in range(cube_2.ndim) - if dim_coords_2[i] in common_dim_coords + i for i in range(cube_2.ndim) if dim_coords_2[i] in common_dim_coords ] - weights_2 = broadcast_to_shape( - weights, cube_2.shape, dims_2_common - ) + weights_2 = broadcast_to_shape(weights, cube_2.shape, dims_2_common) else: weights_2 = weights # Calculate correlations. 
- s1 = cube_1 - cube_1.collapsed( - corr_coords, iris.analysis.MEAN, weights=weights_1 - ) - s2 = cube_2 - cube_2.collapsed( - corr_coords, iris.analysis.MEAN, weights=weights_2 - ) + s1 = cube_1 - cube_1.collapsed(corr_coords, iris.analysis.MEAN, weights=weights_1) + s2 = cube_2 - cube_2.collapsed(corr_coords, iris.analysis.MEAN, weights=weights_2) covar = (s1 * s2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol ) - var_1 = (s1**2).collapsed( - corr_coords, iris.analysis.SUM, weights=weights_1 - ) - var_2 = (s2**2).collapsed( - corr_coords, iris.analysis.SUM, weights=weights_2 - ) + var_1 = (s1**2).collapsed(corr_coords, iris.analysis.SUM, weights=weights_1) + var_2 = (s2**2).collapsed(corr_coords, iris.analysis.SUM, weights=weights_2) denom = iris.analysis.maths.apply_ufunc( np.sqrt, var_1 * var_2, new_unit=covar.units diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 99c8add123..42f47abf12 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -180,9 +180,7 @@ def interpolate(self, cube, method=None): # as the new `index` dimension created by interpolating. src_anon_dims = self._src_cube_anon_dims(cube) interp_anon_dims = self._src_cube_anon_dims(interpolated_cube) - (anon_dim_index,) = list( - set(interp_anon_dims) - set(src_anon_dims) - ) + (anon_dim_index,) = list(set(interp_anon_dims) - set(src_anon_dims)) # Add the new coord to the interpolated cube. interpolated_cube.add_dim_coord(index_coord, anon_dim_index) return interpolated_cube @@ -250,9 +248,7 @@ def interpolate(cube, sample_points, method=None): # about to sample, # and then adding a new dimension to accommodate all the sample points. 
remaining = [ - (dim, size) - for dim, size in enumerate(cube.shape) - if dim not in squish_my_dims + (dim, size) for dim, size in enumerate(cube.shape) if dim not in squish_my_dims ] new_data_shape = [size for dim, size in remaining] new_data_shape.append(trajectory_size) @@ -440,9 +436,7 @@ def interpolate(cube, sample_points, method=None): ) # Rearrange the data dimensions and the fancy indices into that order. source_data = source_data.transpose(dims_order) - fancy_source_indices = [ - fancy_source_indices[i_dim] for i_dim in dims_order - ] + fancy_source_indices = [fancy_source_indices[i_dim] for i_dim in dims_order] # Apply the fancy indexing to get all the result data points. new_cube.data = source_data[tuple(fancy_source_indices)] @@ -454,8 +448,7 @@ def interpolate(cube, sample_points, method=None): if not squish_my_dims.isdisjoint(cube.coord_dims(coord)) ] new_cube_coords = [ - new_cube.coord(column_coord.name()) - for column_coord in column_coords + new_cube.coord(column_coord.name()) for column_coord in column_coords ] all_point_indices = np.array(column_indexes) single_point_test_cube = cube[column_indexes[0]] @@ -475,14 +468,11 @@ def interpolate(cube, sample_points, method=None): # So here, we translate cube indexes into *coord* indexes. src_coord_dims = cube.coord_dims(src_coord) fancy_coord_index_arrays = [ - list(all_point_indices[:, src_dim]) - for src_dim in src_coord_dims + list(all_point_indices[:, src_dim]) for src_dim in src_coord_dims ] # Fill the new coord with all the correct points from the old one. - new_cube_coord.points = src_coord.points[ - tuple(fancy_coord_index_arrays) - ] + new_cube_coord.points = src_coord.points[tuple(fancy_coord_index_arrays)] # NOTE: the new coords do *not* have bounds. return new_cube @@ -628,22 +618,16 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): sample_space_cube.remove_coord(coord) # Order the sample point coords according to the sample space cube coords. 
- sample_space_coord_names = [ - coord.name() for coord in sample_space_cube.coords() - ] + sample_space_coord_names = [coord.name() for coord in sample_space_cube.coords()] new_order = [ - sample_space_coord_names.index(name) - for name in sample_point_coord_names + sample_space_coord_names.index(name) for name in sample_point_coord_names ] coord_values = np.array([coord_values[i] for i in new_order]) sample_point_coord_names = [sample_point_coord_names[i] for i in new_order] - sample_space_coords = ( - sample_space_cube.dim_coords + sample_space_cube.aux_coords - ) + sample_space_coords = sample_space_cube.dim_coords + sample_space_cube.aux_coords sample_space_coords_and_dims = [ - (coord, sample_space_cube.coord_dims(coord)) - for coord in sample_space_coords + (coord, sample_space_cube.coord_dims(coord)) for coord in sample_space_coords ] if cache is not None and cube in cache: @@ -656,9 +640,7 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): dtype=float, ) for d, ndi in enumerate(np.ndindex(sample_space_cube.data.shape)): - for c, (coord, coord_dims) in enumerate( - sample_space_coords_and_dims - ): + for c, (coord, coord_dims) in enumerate(sample_space_coords_and_dims): # Index of this datum along this coordinate (could be n-D). if coord_dims: keys = tuple(ndi[ind] for ind in coord_dims) @@ -794,13 +776,8 @@ def __init__(self, src_cube, target_grid_cube): raise ValueError(msg) src_x_coord = src_cube.coord(axis="x") src_y_coord = src_cube.coord(axis="y") - if src_cube.coord_dims(src_x_coord) != src_cube.coord_dims( - src_y_coord - ): - msg = ( - "Source cube X and Y coordinates must have the same " - "cube dimensions." - ) + if src_cube.coord_dims(src_x_coord) != src_cube.coord_dims(src_y_coord): + msg = "Source cube X and Y coordinates must have the same cube dimensions." 
raise ValueError(msg) # Record *copies* of the original grid coords, in the desired @@ -843,9 +820,7 @@ def __init__(self, src_cube, target_grid_cube): "Coordinate {!r} has units of {!r}, which does not " 'convert to "degrees".' ) - raise ValueError( - msg.format(coord.name(), str(coord.units)) - ) + raise ValueError(msg.format(coord.name(), str(coord.units))) else: # Check that source and target have the same X and Y units. if ( diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 61855f1188..819dd7c17d 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -14,11 +14,7 @@ import dask.array as da import numpy as np -from iris.common import ( - CFVariableMixin, - CoordMetadata, - metadata_manager_factory, -) +from iris.common import CFVariableMixin, CoordMetadata, metadata_manager_factory import iris.coords from iris.exceptions import IrisIgnoringBoundsWarning @@ -302,8 +298,7 @@ def _remap(self, dependency_dims, derived_dims): # no transpose is needed. if derived_dims: keys = tuple( - slice(None) if dim in derived_dims else 0 - for dim in range(ndim) + slice(None) if dim in derived_dims else 0 for dim in range(ndim) ) nd_points = nd_points[keys] else: @@ -340,13 +335,9 @@ def _remap_with_bounds(self, dependency_dims, derived_dims): if coord: # Get the bounds or points as consistent with the Cube. if coord.nbounds: - nd_values = self._nd_bounds( - coord, dependency_dims[key], ndim - ) + nd_values = self._nd_bounds(coord, dependency_dims[key], ndim) else: - nd_values = self._nd_points( - coord, dependency_dims[key], ndim - ) + nd_values = self._nd_points(coord, dependency_dims[key], ndim) # Restrict to just the dimensions relevant to the # derived coord. NB. 
These are always in Cube-order, so @@ -383,9 +374,7 @@ class AtmosphereSigmaFactory(AuxCoordFactory): """ - def __init__( - self, pressure_at_top=None, sigma=None, surface_air_pressure=None - ): + def __init__(self, pressure_at_top=None, sigma=None, surface_air_pressure=None): """Creates an atmosphere sigma coordinate factory with the formula: p(n, k, j, i) = pressure_at_top + sigma(k) * @@ -441,9 +430,7 @@ def _check_dependencies(pressure_at_top, sigma, surface_air_pressure): f"Coordinate '{coord.name()}' has bounds. These will " "be disregarded" ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Check units if sigma.units.is_unknown(): @@ -451,8 +438,7 @@ def _check_dependencies(pressure_at_top, sigma, surface_air_pressure): sigma.units = cf_units.Unit("1") if not sigma.units.is_dimensionless(): raise ValueError( - f"Invalid units: 'sigma' must be dimensionless, got " - f"'{sigma.units}'" + f"Invalid units: 'sigma' must be dimensionless, got " f"'{sigma.units}'" ) if pressure_at_top.units != surface_air_pressure.units: raise ValueError( @@ -480,9 +466,7 @@ def dependencies(self): @staticmethod def _derive(pressure_at_top, sigma, surface_air_pressure): """Derive coordinate.""" - return pressure_at_top + sigma * ( - surface_air_pressure - pressure_at_top - ) + return pressure_at_top + sigma * (surface_air_pressure - pressure_at_top) def make_coord(self, coord_dims_func): """ @@ -512,9 +496,7 @@ def make_coord(self, coord_dims_func): # Bounds bounds = None if self.sigma.nbounds: - nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) pressure_at_top = nd_values_by_key["pressure_at_top"] sigma = nd_values_by_key["sigma"] surface_air_pressure = nd_values_by_key["surface_air_pressure"] @@ -536,13 +518,9 @@ def make_coord(self, coord_dims_func): "disregarded", 
category=IrisIgnoringBoundsWarning, ) - surface_air_pressure_pts = nd_points_by_key[ - "surface_air_pressure" - ] + surface_air_pressure_pts = nd_points_by_key["surface_air_pressure"] bds_shape = list(surface_air_pressure_pts.shape) + [1] - surface_air_pressure = surface_air_pressure_pts.reshape( - bds_shape - ) + surface_air_pressure = surface_air_pressure_pts.reshape(bds_shape) bounds = self._derive(pressure_at_top, sigma, surface_air_pressure) # Create coordinate @@ -588,20 +566,18 @@ def __init__(self, delta=None, sigma=None, orography=None): if delta and delta.nbounds not in (0, 2): raise ValueError( - "Invalid delta coordinate: must have either 0 or" " 2 bounds." + "Invalid delta coordinate: must have either 0 or 2 bounds." ) if sigma and sigma.nbounds not in (0, 2): raise ValueError( - "Invalid sigma coordinate: must have either 0 or" " 2 bounds." + "Invalid sigma coordinate: must have either 0 or 2 bounds." ) if orography and orography.nbounds: msg = ( "Orography coordinate {!r} has bounds." " These will be disregarded.".format(orography.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) self.delta = delta self.sigma = sigma @@ -609,16 +585,10 @@ def __init__(self, delta=None, sigma=None, orography=None): self.standard_name = "altitude" if delta is None and orography is None: - emsg = ( - "Unable to determine units: no delta or orography " - "available." - ) + emsg = "Unable to determine units: no delta or orography available." raise ValueError(emsg) if delta and orography and delta.units != orography.units: - emsg = ( - "Incompatible units: delta and orography must have " - "the same units." - ) + emsg = "Incompatible units: delta and orography must have the same units." 
raise ValueError(emsg) self.units = (delta and delta.units) or orography.units if not self.units.is_convertible("m"): @@ -671,13 +641,9 @@ def make_coord(self, coord_dims_func): ) bounds = None - if (self.delta and self.delta.nbounds) or ( - self.sigma and self.sigma.nbounds - ): + if (self.delta and self.delta.nbounds) or (self.sigma and self.sigma.nbounds): # Build the bounds array. - nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) delta = nd_values_by_key["delta"] sigma = nd_values_by_key["sigma"] orography = nd_values_by_key["orography"] @@ -688,8 +654,7 @@ def make_coord(self, coord_dims_func): raise ValueError("Invalid sigma coordinate bounds.") if orography.shape[-1:] not in [(), (1,)]: warnings.warn( - "Orography coordinate has bounds. " - "These are being disregarded.", + "Orography coordinate has bounds. These are being disregarded.", category=IrisIgnoringBoundsWarning, stacklevel=2, ) @@ -728,15 +693,13 @@ def update(self, old_coord, new_coord=None): if self.delta is old_coord: if new_coord and new_coord.nbounds not in (0, 2): raise ValueError( - "Invalid delta coordinate:" - " must have either 0 or 2 bounds." + "Invalid delta coordinate: must have either 0 or 2 bounds." ) self.delta = new_coord elif self.sigma is old_coord: if new_coord and new_coord.nbounds not in (0, 2): raise ValueError( - "Invalid sigma coordinate:" - " must have either 0 or 2 bounds." + "Invalid sigma coordinate: must have either 0 or 2 bounds." ) self.sigma = new_coord elif self.orography is old_coord: @@ -745,9 +708,7 @@ def update(self, old_coord, new_coord=None): "Orography coordinate {!r} has bounds." 
" These will be disregarded.".format(new_coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) self.orography = new_coord @@ -803,20 +764,18 @@ def _check_dependencies(delta, sigma, surface_air_pressure): # Check bounds. if delta and delta.nbounds not in (0, 2): raise ValueError( - "Invalid delta coordinate: must have either 0 or" " 2 bounds." + "Invalid delta coordinate: must have either 0 or 2 bounds." ) if sigma and sigma.nbounds not in (0, 2): raise ValueError( - "Invalid sigma coordinate: must have either 0 or" " 2 bounds." + "Invalid sigma coordinate: must have either 0 or 2 bounds." ) if surface_air_pressure and surface_air_pressure.nbounds: msg = ( "Surface pressure coordinate {!r} has bounds. These will" " be disregarded.".format(surface_air_pressure.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Check units. if sigma is not None and sigma.units.is_unknown(): @@ -890,13 +849,9 @@ def make_coord(self, coord_dims_func): ) bounds = None - if (self.delta and self.delta.nbounds) or ( - self.sigma and self.sigma.nbounds - ): + if (self.delta and self.delta.nbounds) or (self.sigma and self.sigma.nbounds): # Build the bounds array. 
- nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) delta = nd_values_by_key["delta"] sigma = nd_values_by_key["sigma"] surface_air_pressure = nd_values_by_key["surface_air_pressure"] @@ -911,13 +866,9 @@ def make_coord(self, coord_dims_func): "These are being disregarded.", category=IrisIgnoringBoundsWarning, ) - surface_air_pressure_pts = nd_points_by_key[ - "surface_air_pressure" - ] + surface_air_pressure_pts = nd_points_by_key["surface_air_pressure"] bds_shape = list(surface_air_pressure_pts.shape) + [1] - surface_air_pressure = surface_air_pressure_pts.reshape( - bds_shape - ) + surface_air_pressure = surface_air_pressure_pts.reshape(bds_shape) bounds = self._derive(delta, sigma, surface_air_pressure) @@ -982,9 +933,7 @@ def __init__( def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): # Check for sufficient factory coordinates. if zlev is None: - raise ValueError( - "Unable to determine units: no zlev coordinate available." - ) + raise ValueError("Unable to determine units: no zlev coordinate available.") if nsigma is None: raise ValueError("Missing nsigma coordinate.") @@ -1023,15 +972,12 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): "The {} coordinate {!r} has bounds. 
" "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) for coord, term in ((depth_c, "depth_c"), (nsigma, "nsigma")): if coord is not None and coord.shape != (1,): - msg = ( - "Expected scalar {} coordinate {!r}: " - "got shape {!r}.".format(term, coord.name(), coord.shape) + msg = "Expected scalar {} coordinate {!r}: got shape {!r}.".format( + term, coord.name(), coord.shape ) raise ValueError(msg) @@ -1080,9 +1026,7 @@ def dependencies(self): zlev=self.zlev, ) - def _derive( - self, sigma, eta, depth, depth_c, zlev, nsigma, coord_dims_func - ): + def _derive(self, sigma, eta, depth, depth_c, zlev, nsigma, coord_dims_func): # Calculate the index of the 'z' dimension in the input arrays. # First find the cube 'z' dimension ... [cube_z_dim] = coord_dims_func(self.dependencies["zlev"]) @@ -1094,11 +1038,7 @@ def _derive( # Note: all the inputs have the same number of dimensions >= 1, except # for any missing dependencies, which have scalar values. allshapes = np.array( - [ - el.shape - for el in (sigma, eta, depth, depth_c, zlev) - if el.ndim > 0 - ] + [el.shape for el in (sigma, eta, depth, depth_c, zlev) if el.ndim > 0] ) result_shape = list(np.max(allshapes, axis=0)) ndims = len(result_shape) @@ -1130,23 +1070,17 @@ def _derive( if len(result_shape) > 1: result_chunks = [1] * len(result_shape) result_chunks[-2:] = result_shape[-2:] - ones_full_result = da.ones( - result_shape, chunks=result_chunks, dtype=zlev.dtype - ) + ones_full_result = da.ones(result_shape, chunks=result_chunks, dtype=zlev.dtype) # Expand nsigma_levs to its full required shape : needed as the # calculated result may have a fixed size of 1 in some dimensions. result_nsigma_levs = nsigma_levs * ones_full_result[z_slices_nsigma] # Likewise, expand zlev to its full required shape. 
- result_rest_levs = ( - zlev[z_slices_rest] * ones_full_result[z_slices_rest] - ) + result_rest_levs = zlev[z_slices_rest] * ones_full_result[z_slices_rest] # Combine nsigma and 'rest' levels for the final result. - result = da.concatenate( - [result_nsigma_levs, result_rest_levs], axis=z_dim - ) + result = da.concatenate([result_nsigma_levs, result_rest_levs], axis=z_dim) return result def make_coord(self, coord_dims_func): @@ -1181,16 +1115,12 @@ def make_coord(self, coord_dims_func): bounds = None if self.zlev.nbounds or (self.sigma and self.sigma.nbounds): # Build the bounds array. - nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) valid_shapes = [(), (1,), (2,)] for key in ("sigma", "zlev"): if nd_values_by_key[key].shape[-1:] not in valid_shapes: name = self.dependencies[key].name() - msg = "Invalid bounds for {} " "coordinate {!r}.".format( - key, name - ) + msg = "Invalid bounds for {} coordinate {!r}.".format(key, name) raise ValueError(msg) valid_shapes.pop() for key in ("eta", "depth", "depth_c", "nsigma"): @@ -1200,9 +1130,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1283,9 +1211,7 @@ def _check_dependencies(sigma, eta, depth): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Check units. 
if sigma is not None and sigma.units.is_unknown(): @@ -1347,16 +1273,12 @@ def make_coord(self, coord_dims_func): bounds = None if self.sigma and self.sigma.nbounds: # Build the bounds array. - nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) valid_shapes = [(), (1,), (2,)] key = "sigma" if nd_values_by_key[key].shape[-1:] not in valid_shapes: name = self.dependencies[key].name() - msg = "Invalid bounds for {} " "coordinate {!r}.".format( - key, name - ) + msg = "Invalid bounds for {} coordinate {!r}.".format(key, name) raise ValueError(msg) valid_shapes.pop() for key in ("eta", "depth"): @@ -1366,9 +1288,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1426,13 +1346,7 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): @staticmethod def _check_dependencies(s, c, eta, depth, depth_c): # Check for sufficient factory coordinates. - if ( - eta is None - or s is None - or c is None - or depth is None - or depth_c is None - ): + if eta is None or s is None or c is None or depth is None or depth_c is None: msg = ( "Unable to construct Ocean s-coordinate, generic form 1 " "factory due to insufficient source coordinates." @@ -1463,14 +1377,11 @@ def _check_dependencies(s, c, eta, depth, depth_c): "The {} coordinate {!r} has bounds. 
" "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) if depth_c is not None and depth_c.shape != (1,): - msg = ( - "Expected scalar {} coordinate {!r}: " - "got shape {!r}.".format(term, coord.name(), coord.shape) + msg = "Expected scalar {} coordinate {!r}: got shape {!r}.".format( + term, coord.name(), coord.shape ) raise ValueError(msg) @@ -1545,16 +1456,12 @@ def make_coord(self, coord_dims_func): bounds = None if self.s.nbounds or (self.c and self.c.nbounds): # Build the bounds array. - nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) valid_shapes = [(), (1,), (2,)] key = "s" if nd_values_by_key[key].shape[-1:] not in valid_shapes: name = self.dependencies[key].name() - msg = "Invalid bounds for {} " "coordinate {!r}.".format( - key, name - ) + msg = "Invalid bounds for {} coordinate {!r}.".format(key, name) raise ValueError(msg) valid_shapes.pop() for key in ("eta", "depth", "depth_c"): @@ -1564,9 +1471,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Swap bounds with points. 
bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1596,9 +1501,7 @@ def make_coord(self, coord_dims_func): class OceanSFactory(AuxCoordFactory): """Defines an Ocean s-coordinate factory.""" - def __init__( - self, s=None, eta=None, depth=None, a=None, b=None, depth_c=None - ): + def __init__(self, s=None, eta=None, depth=None, a=None, b=None, depth_c=None): """ Creates an Ocean s-coordinate factory with the formula: @@ -1660,16 +1563,13 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) coords = ((a, "a"), (b, "b"), (depth_c, "depth_c")) for coord, term in coords: if coord is not None and coord.shape != (1,): - msg = ( - "Expected scalar {} coordinate {!r}: " - "got shape {!r}.".format(term, coord.name(), coord.shape) + msg = "Expected scalar {} coordinate {!r}: got shape {!r}.".format( + term, coord.name(), coord.shape ) raise ValueError(msg) @@ -1679,9 +1579,8 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): s.units = cf_units.Unit("1") if s is not None and not s.units.is_dimensionless(): - msg = ( - "Invalid units: s coordinate {!r} " - "must be dimensionless.".format(s.name()) + msg = "Invalid units: s coordinate {!r} must be dimensionless.".format( + s.name() ) raise ValueError(msg) @@ -1746,16 +1645,12 @@ def make_coord(self, coord_dims_func): bounds = None if self.s.nbounds: # Build the bounds array. 
- nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) valid_shapes = [(), (1,), (2,)] key = "s" if nd_values_by_key[key].shape[-1:] not in valid_shapes: name = self.dependencies[key].name() - msg = "Invalid bounds for {} " "coordinate {!r}.".format( - key, name - ) + msg = "Invalid bounds for {} coordinate {!r}.".format(key, name) raise ValueError(msg) valid_shapes.pop() for key in ("eta", "depth", "a", "b", "depth_c"): @@ -1765,9 +1660,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1829,13 +1722,7 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): @staticmethod def _check_dependencies(s, c, eta, depth, depth_c): # Check for sufficient factory coordinates. - if ( - eta is None - or s is None - or c is None - or depth is None - or depth_c is None - ): + if eta is None or s is None or c is None or depth is None or depth_c is None: msg = ( "Unable to construct Ocean s-coordinate, generic form 2 " "factory due to insufficient source coordinates." @@ -1866,14 +1753,11 @@ def _check_dependencies(s, c, eta, depth, depth_c): "The {} coordinate {!r} has bounds. 
" "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) if depth_c is not None and depth_c.shape != (1,): - msg = ( - "Expected scalar depth_c coordinate {!r}: " - "got shape {!r}.".format(depth_c.name(), depth_c.shape) + msg = "Expected scalar depth_c coordinate {!r}: got shape {!r}.".format( + depth_c.name(), depth_c.shape ) raise ValueError(msg) @@ -1948,16 +1832,12 @@ def make_coord(self, coord_dims_func): bounds = None if self.s.nbounds or (self.c and self.c.nbounds): # Build the bounds array. - nd_values_by_key = self._remap_with_bounds( - dependency_dims, derived_dims - ) + nd_values_by_key = self._remap_with_bounds(dependency_dims, derived_dims) valid_shapes = [(), (1,), (2,)] key = "s" if nd_values_by_key[key].shape[-1:] not in valid_shapes: name = self.dependencies[key].name() - msg = "Invalid bounds for {} " "coordinate {!r}.".format( - key, name - ) + msg = "Invalid bounds for {} coordinate {!r}.".format(key, name) raise ValueError(msg) valid_shapes.pop() for key in ("eta", "depth", "depth_c"): @@ -1967,9 +1847,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) # Swap bounds with points. 
bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index 43dc09d5db..f2e3ec588b 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -381,9 +381,7 @@ def __call__(self, func): active = self.__dict__["active"] if active is not None and active in self: services = self.__dict__[active] - if isinstance(services, str) or not isinstance( - services, Iterable - ): + if isinstance(services, str) or not isinstance(services, Iterable): services = (services,) result = service in services return result diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index f88a2e57b5..aaebcdf66e 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -108,9 +108,7 @@ def __new__(mcs, name, bases, namespace): for base in bases: if hasattr(base, "_fields"): base_names = getattr(base, "_fields") - is_abstract = getattr( - base_names, "__isabstractmethod__", False - ) + is_abstract = getattr(base_names, "__isabstractmethod__", False) if not is_abstract: if (not isinstance(base_names, Iterable)) or isinstance( base_names, str @@ -177,9 +175,7 @@ def __eq__(self, other): if hasattr(other, "__class__") and other.__class__ is self.__class__: if _LENIENT(self.__eq__) or _LENIENT(self.equal): # Perform "lenient" equality. - logger.debug( - "lenient", extra=dict(cls=self.__class__.__name__) - ) + logger.debug("lenient", extra=dict(cls=self.__class__.__name__)) result = self._compare_lenient(other) else: # Perform "strict" equality. 
@@ -242,19 +238,13 @@ def __str__(self): field_strings = [] for field in self._fields: value = getattr(self, field) - if ( - value is None - or isinstance(value, (str, Mapping)) - and not value - ): + if value is None or isinstance(value, (str, Mapping)) and not value: continue field_strings.append(f"{field}={value}") return f"{type(self).__name__}({', '.join(field_strings)})" - def _api_common( - self, other, func_service, func_operation, action, lenient=None - ): + def _api_common(self, other, func_service, func_operation, action, lenient=None): """ Common entry-point for lenient metadata API methods. @@ -283,14 +273,9 @@ def _api_common( """ # Ensure that we have similar class instances. - if ( - not hasattr(other, "__class__") - or other.__class__ is not self.__class__ - ): + if not hasattr(other, "__class__") or other.__class__ is not self.__class__: emsg = "Cannot {} {!r} with {!r}." - raise TypeError( - emsg.format(action, self.__class__.__name__, type(other)) - ) + raise TypeError(emsg.format(action, self.__class__.__name__, type(other))) if lenient is None: result = func_operation(other) @@ -449,11 +434,7 @@ def func(field): # Note that, we use "_members" not "_fields". # Lenient equality explicitly ignores the "var_name" member. result = all( - [ - func(field) - for field in BaseMetadata._members - if field != "var_name" - ] + [func(field) for field in BaseMetadata._members if field != "var_name"] ) return result @@ -659,9 +640,7 @@ def difference(self, other, lenient=None): other, self.difference, self._difference, "differ", lenient=lenient ) result = ( - None - if all([item is None for item in result]) - else self.__class__(*result) + None if all([item is None for item in result]) else self.__class__(*result) ) return result @@ -886,11 +865,7 @@ def _difference_lenient(self, other): """ # Perform "strict" difference for "measure". 
- value = ( - None - if self.measure == other.measure - else (self.measure, other.measure) - ) + value = None if self.measure == other.measure else (self.measure, other.measure) # Perform lenient difference of the other parent members. result = super()._difference_lenient(other) result.append(value) @@ -1128,11 +1103,7 @@ def _combine_lenient(self, other): """ # Perform "strict" combination for "cell_methods". - value = ( - self.cell_methods - if self.cell_methods == other.cell_methods - else None - ) + value = self.cell_methods if self.cell_methods == other.cell_methods else None # Perform lenient combination of the other parent members. result = super()._combine_lenient(other) result.append(value) @@ -1355,15 +1326,11 @@ def _compare_lenient(self, other): # The "circular" member is not part of lenient equivalence. return super()._compare_lenient(other) - @wraps( - CoordMetadata._difference_lenient, assigned=("__doc__",), updated=() - ) + @wraps(CoordMetadata._difference_lenient, assigned=("__doc__",), updated=()) def _difference_lenient(self, other): # Perform "strict" difference for "circular". value = ( - None - if self.circular == other.circular - else (self.circular, other.circular) + None if self.circular == other.circular else (self.circular, other.circular) ) # Perform lenient difference of the other parent members. 
result = super()._difference_lenient(other) @@ -1478,20 +1445,14 @@ def metadata_filter( if standard_name is not None: result = [ - instance - for instance in result - if instance.standard_name == standard_name + instance for instance in result if instance.standard_name == standard_name ] if long_name is not None: - result = [ - instance for instance in result if instance.long_name == long_name - ] + result = [instance for instance in result if instance.long_name == long_name] if var_name is not None: - result = [ - instance for instance in result if instance.var_name == var_name - ] + result = [instance for instance in result if instance.var_name == var_name] if attributes is not None: if not isinstance(attributes, Mapping): @@ -1520,22 +1481,16 @@ def get_axis(instance): axis = guess_coord_axis(instance) return axis - result = [ - instance for instance in result if get_axis(instance) == axis - ] + result = [instance for instance in result if get_axis(instance) == axis] if obj is not None: - if hasattr(obj, "__class__") and issubclass( - obj.__class__, BaseMetadata - ): + if hasattr(obj, "__class__") and issubclass(obj.__class__, BaseMetadata): target_metadata = obj else: target_metadata = obj.metadata result = [ - instance - for instance in result - if instance.metadata == target_metadata + instance for instance in result if instance.metadata == target_metadata ] return result @@ -1593,10 +1548,7 @@ def __reduce__(self): def __repr__(self): args = ", ".join( - [ - "{}={!r}".format(field, getattr(self, field)) - for field in self._fields - ] + ["{}={!r}".format(field, getattr(self, field)) for field in self._fields] ) return "{}({})".format(self.__class__.__name__, args) diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index b6cd3132b5..56b9263555 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -44,9 +44,7 @@ def _get_valid_standard_name(name): name_is_valid &= std_name_modifier in valid_std_name_modifiers if not 
name_is_valid: - raise ValueError( - "{!r} is not a valid standard_name".format(name) - ) + raise ValueError("{!r} is not a valid standard_name".format(name)) return name @@ -214,9 +212,7 @@ def attributes(self): @attributes.setter def attributes(self, attributes): - self._metadata_manager.attributes = LimitedAttributeDict( - attributes or {} - ) + self._metadata_manager.attributes = LimitedAttributeDict(attributes or {}) @property def metadata(self): @@ -244,15 +240,11 @@ def metadata(self, metadata): else: # Generic iterable/container with no associated keys. missing = [ - field - for field in fields - if not hasattr(metadata, field) + field for field in fields if not hasattr(metadata, field) ] if missing: - missing = ", ".join( - map(lambda i: "{!r}".format(i), missing) - ) + missing = ", ".join(map(lambda i: "{!r}".format(i), missing)) emsg = "Invalid {!r} metadata, require {} to be specified." raise TypeError(emsg.format(type(arg), missing)) diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 83ca630353..c9d1936c41 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -347,20 +347,14 @@ def __call__(self, lhs, rhs): """ from iris.cube import Cube - emsg = ( - "{cls} requires {arg!r} argument to be a 'Cube', got {actual!r}." - ) + emsg = "{cls} requires {arg!r} argument to be a 'Cube', got {actual!r}." clsname = self.__class__.__name__ if not isinstance(lhs, Cube): - raise TypeError( - emsg.format(cls=clsname, arg="LHS", actual=type(lhs)) - ) + raise TypeError(emsg.format(cls=clsname, arg="LHS", actual=type(lhs))) if not isinstance(rhs, Cube): - raise TypeError( - emsg.format(cls=clsname, arg="RHS", actual=type(rhs)) - ) + raise TypeError(emsg.format(cls=clsname, arg="RHS", actual=type(rhs))) # Initialise the operand state. self.lhs_cube = lhs @@ -428,9 +422,7 @@ def _as_compatible_cubes(self): try: # Determine whether the tgt cube shape and proposed new src # cube shape will successfully broadcast together. 
- self._broadcast_shape = broadcast_shapes( - tgt_cube.shape, new_src_shape - ) + self._broadcast_shape = broadcast_shapes(tgt_cube.shape, new_src_shape) except ValueError: emsg = ( "Cannot resolve cubes, as a suitable transpose of the " @@ -984,12 +976,8 @@ def _free_mapping( # Determine the src/tgt dimensions that are not mapped, # and not covered by any metadata. - src_free = set(src_dim_coverage.dims_free) & set( - src_aux_coverage.dims_free - ) - tgt_free = set(tgt_dim_coverage.dims_free) & set( - tgt_aux_coverage.dims_free - ) + src_free = set(src_dim_coverage.dims_free) & set(src_aux_coverage.dims_free) + tgt_free = set(tgt_dim_coverage.dims_free) & set(tgt_aux_coverage.dims_free) if src_free or tgt_free: # Determine the src/tgt dimensions that are not mapped. @@ -1022,9 +1010,7 @@ def _assign_mapping(extent, unmapped_local_items, free_items=None): else: def _filter(items): - return list( - filter(lambda item: item[1] == extent, items) - ) + return list(filter(lambda item: item[1] == extent, items)) def _pop(item, items): dim, _ = item @@ -1088,9 +1074,7 @@ def _pop(item, items): break # Determine whether there are still unmapped src dimensions. - src_unmapped = ( - set(range(src_cube.ndim)) - set(self.mapping) - set(free_mapping) - ) + src_unmapped = set(range(src_cube.ndim)) - set(self.mapping) - set(free_mapping) if src_unmapped: plural = "s" if len(src_unmapped) > 1 else "" @@ -1119,9 +1103,7 @@ def _metadata_coverage(self): """ # Determine the common dim coordinate metadata coverage. - common_dim_metadata = [ - item.metadata for item in self.category_common.items_dim - ] + common_dim_metadata = [item.metadata for item in self.category_common.items_dim] self.lhs_cube_dim_coverage = self._dim_coverage( self.lhs_cube, @@ -1135,9 +1117,7 @@ def _metadata_coverage(self): ) # Determine the common aux and scalar coordinate metadata coverage. 
- common_aux_metadata = [ - item.metadata for item in self.category_common.items_aux - ] + common_aux_metadata = [item.metadata for item in self.category_common.items_aux] common_scalar_metadata = [ item.metadata for item in self.category_common.items_scalar ] @@ -1210,22 +1190,14 @@ def _metadata_mapping(self): # Use the dim coordinates to fully map the # src cube dimensions to the tgt cube dimensions. - self.mapping.update( - self._dim_mapping(src_dim_coverage, tgt_dim_coverage) - ) - logger.debug( - f"mapping common dim coordinates gives, mapping={self.mapping}" - ) + self.mapping.update(self._dim_mapping(src_dim_coverage, tgt_dim_coverage)) + logger.debug(f"mapping common dim coordinates gives, mapping={self.mapping}") # If necessary, use the aux coordinates to fully map the # src cube dimensions to the tgt cube dimensions. if not self.mapped: - self.mapping.update( - self._aux_mapping(src_aux_coverage, tgt_aux_coverage) - ) - logger.debug( - f"mapping common aux coordinates, mapping={self.mapping}" - ) + self.mapping.update(self._aux_mapping(src_aux_coverage, tgt_aux_coverage)) + logger.debug(f"mapping common aux coordinates, mapping={self.mapping}") if not self.mapped: # Attempt to complete the mapping using src/tgt free dimensions. @@ -1253,15 +1225,9 @@ def _metadata_mapping(self): # Given the number of free dimensions, determine whether the # mapping requires to be reversed. # Only applies to equal src/tgt dimensionality. - src_free = set(src_dim_coverage.dims_free) & set( - src_aux_coverage.dims_free - ) - tgt_free = set(tgt_dim_coverage.dims_free) & set( - tgt_aux_coverage.dims_free - ) - free_flip = src_cube.ndim == tgt_cube.ndim and len(tgt_free) > len( - src_free - ) + src_free = set(src_dim_coverage.dims_free) & set(src_aux_coverage.dims_free) + tgt_free = set(tgt_dim_coverage.dims_free) & set(tgt_aux_coverage.dims_free) + free_flip = src_cube.ndim == tgt_cube.ndim and len(tgt_free) > len(src_free) # Reverse the mapping direction. 
if broadcast_flip or free_flip: @@ -1338,9 +1304,7 @@ def _metadata_prepare(self): tgt_aux_coverage, ) - self._prepare_factory_payload( - tgt_cube, tgt_category_local, from_src=False - ) + self._prepare_factory_payload(tgt_cube, tgt_category_local, from_src=False) self._prepare_factory_payload(src_cube, src_category_local) def _metadata_resolve(self): @@ -1566,9 +1530,7 @@ def _prepare_common_aux_payload( src_type = type(src_coord) tgt_type = type(tgt_coord) # Downcast to aux if there are mixed container types. - container = ( - src_type if src_type is tgt_type else AuxCoord - ) + container = src_type if src_type is tgt_type else AuxCoord prepared_item = self._create_prepared_item( src_coord, tgt_item.dims, @@ -1710,9 +1672,7 @@ def _get_prepared_item( src = tgt = None if from_src: src = item.metadata - dims = tuple( - [self.mapping[dim] for dim in item.dims] - ) + dims = tuple([self.mapping[dim] for dim in item.dims]) else: tgt = item.metadata dims = item.dims @@ -1908,9 +1868,7 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): # Determine whether there are tgt dimensions not mapped to by an # associated src dimension, and thus may be covered by any local # tgt dim coordinates. - extra_tgt_dims = set(range(tgt_dim_coverage.cube.ndim)) - set( - mapped_tgt_dims - ) + extra_tgt_dims = set(range(tgt_dim_coverage.cube.ndim)) - set(mapped_tgt_dims) if LENIENT["maths"]: tgt_dims_conflict = set() @@ -1942,9 +1900,7 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): # Determine whether there are any tgt dims free to be mapped # by an available local tgt dim coordinate. - tgt_dims_unmapped = ( - set(tgt_dim_coverage.dims_local) - tgt_dims_conflict - ) + tgt_dims_unmapped = set(tgt_dim_coverage.dims_local) - tgt_dims_conflict else: # For strict maths, only local tgt dim coordinates covering # the extra dimensions of the tgt cube may be added. 
@@ -1961,9 +1917,7 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): ) self.prepared_category.items_dim.append(prepared_item) - def _prepare_local_payload_scalar( - self, src_aux_coverage, tgt_aux_coverage - ): + def _prepare_local_payload_scalar(self, src_aux_coverage, tgt_aux_coverage): """ Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each @@ -2119,11 +2073,7 @@ def _prepare_points_and_bounds( bounds = src_coord.bounds # Deal with coordinates spanning broadcast dimensions. - if ( - points is None - and bounds is None - and src_coord.shape != tgt_coord.shape - ): + if points is None and bounds is None and src_coord.shape != tgt_coord.shape: # Check whether the src coordinate is broadcasting. dims = tuple([self.mapping[dim] for dim in src_dims]) src_shape_broadcast = tuple([self.shape[dim] for dim in dims]) @@ -2158,9 +2108,7 @@ def _prepare_points_and_bounds( if points is None and bounds is None: # Note that, this also ensures shape equality. - eq_points = array_equal( - src_coord.points, tgt_coord.points, withnans=True - ) + eq_points = array_equal(src_coord.points, tgt_coord.points, withnans=True) if eq_points: points = src_coord.points src_has_bounds = src_coord.has_bounds() @@ -2168,9 +2116,7 @@ def _prepare_points_and_bounds( if src_has_bounds and tgt_has_bounds: src_bounds = src_coord.bounds - eq_bounds = array_equal( - src_bounds, tgt_coord.bounds, withnans=True - ) + eq_bounds = array_equal(src_bounds, tgt_coord.bounds, withnans=True) if eq_bounds: bounds = src_bounds @@ -2458,9 +2404,7 @@ def cube(self, data, in_place=False): result = Cube(data) # Add the combined cube metadata from both the candidate cubes. 
- result.metadata = self.lhs_cube.metadata.combine( - self.rhs_cube.metadata - ) + result.metadata = self.lhs_cube.metadata.combine(self.rhs_cube.metadata) # Add the prepared dim coordinates. for item in self.prepared_category.items_dim: @@ -2469,8 +2413,7 @@ def cube(self, data, in_place=False): # Add the prepared aux and scalar coordinates. prepared_aux_coords = ( - self.prepared_category.items_aux - + self.prepared_category.items_scalar + self.prepared_category.items_aux + self.prepared_category.items_scalar ) for item in prepared_aux_coords: # These items are "special" diff --git a/lib/iris/config.py b/lib/iris/config.py index 25aeffdb33..c31f856d54 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -38,9 +38,7 @@ import iris.exceptions -def get_logger( - name, datefmt=None, fmt=None, level=None, propagate=None, handler=True -): +def get_logger(name, datefmt=None, fmt=None, level=None, propagate=None, handler=True): """Create a custom class for logging. Create a :class:`logging.Logger` with a :class:`logging.StreamHandler` diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 87103bf6f1..c9ad04097d 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -23,9 +23,7 @@ import iris.coords -def add_categorised_coord( - cube, name, from_coord, category_function, units="1" -): +def add_categorised_coord(cube, name, from_coord, category_function, units="1"): """ Add a new coordinate to a cube, by categorising an existing one. 
@@ -117,16 +115,12 @@ def _pt_date(coord, time): def add_year(cube, coord, name="year"): """Add a categorical calendar-year coordinate.""" - add_categorised_coord( - cube, name, coord, lambda coord, x: _pt_date(coord, x).year - ) + add_categorised_coord(cube, name, coord, lambda coord, x: _pt_date(coord, x).year) def add_month_number(cube, coord, name="month_number"): """Add a categorical month coordinate, values 1..12.""" - add_categorised_coord( - cube, name, coord, lambda coord, x: _pt_date(coord, x).month - ) + add_categorised_coord(cube, name, coord, lambda coord, x: _pt_date(coord, x).month) def add_month_fullname(cube, coord, name="month_fullname"): @@ -153,9 +147,7 @@ def add_month(cube, coord, name="month"): def add_day_of_month(cube, coord, name="day_of_month"): """Add a categorical day-of-month coordinate, values 1..31.""" - add_categorised_coord( - cube, name, coord, lambda coord, x: _pt_date(coord, x).day - ) + add_categorised_coord(cube, name, coord, lambda coord, x: _pt_date(coord, x).day) def add_day_of_year(cube, coord, name="day_of_year"): @@ -212,9 +204,7 @@ def add_weekday(cube, coord, name="weekday"): def add_hour(cube, coord, name="hour"): """Add a categorical hour coordinate, values 0..23.""" - add_categorised_coord( - cube, name, coord, lambda coord, x: _pt_date(coord, x).hour - ) + add_categorised_coord(cube, name, coord, lambda coord, x: _pt_date(coord, x).hour) # ---------------------------------------------- @@ -314,9 +304,7 @@ def _month_season_numbers(seasons): return month_season_numbers -def add_season( - cube, coord, name="season", seasons=("djf", "mam", "jja", "son") -): +def add_season(cube, coord, name="season", seasons=("djf", "mam", "jja", "son")): """ Add a categorical season-of-year coordinate, with user specified seasons. 
diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index e62f3fbf0e..ca8bf173f8 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -92,9 +92,7 @@ def xml_element(self, doc, attrs=None): xml_element_name = type(self).__name__ # lower case the first char first_char = xml_element_name[0] - xml_element_name = xml_element_name.replace( - first_char, first_char.lower(), 1 - ) + xml_element_name = xml_element_name.replace(first_char, first_char.lower(), 1) coord_system_xml_element = doc.createElement(xml_element_name) @@ -273,9 +271,7 @@ def __init__( self._datum = None #: Describes 'zero' on the ellipsoid in degrees. - self.longitude_of_prime_meridian = _arg_default( - longitude_of_prime_meridian, 0 - ) + self.longitude_of_prime_meridian = _arg_default(longitude_of_prime_meridian, 0) def _pretty_attrs(self): attrs = [("semi_major_axis", self.semi_major_axis)] @@ -304,9 +300,7 @@ def __repr__(self): if len(attrs) == 1 and attrs[0][0] == "semi_major_axis": return "GeogCS(%r)" % self.semi_major_axis else: - return "GeogCS(%s)" % ", ".join( - ["%s=%r" % (k, v) for k, v in attrs] - ) + return "GeogCS(%s)" % ", ".join(["%s=%r" % (k, v) for k, v in attrs]) def __str__(self): attrs = self._pretty_attrs() @@ -488,9 +482,7 @@ def from_datum(cls, datum, longitude_of_prime_meridian=None): crs._inverse_flattening = None #: Describes 'zero' on the ellipsoid in degrees. - crs.longitude_of_prime_meridian = _arg_default( - longitude_of_prime_meridian, 0 - ) + crs.longitude_of_prime_meridian = _arg_default(longitude_of_prime_meridian, 0) crs._datum = datum @@ -547,9 +539,7 @@ def __init__( self.grid_north_pole_longitude = float(grid_north_pole_longitude) #: Longitude of true north pole in rotated grid in degrees. - self.north_pole_grid_longitude = _arg_default( - north_pole_grid_longitude, 0 - ) + self.north_pole_grid_longitude = _arg_default(north_pole_grid_longitude, 0) #: Ellipsoid definition (:class:`GeogCS` or None). 
self.ellipsoid = ellipsoid @@ -560,18 +550,14 @@ def _pretty_attrs(self): ("grid_north_pole_longitude", self.grid_north_pole_longitude), ] if self.north_pole_grid_longitude != 0.0: - attrs.append( - ("north_pole_grid_longitude", self.north_pole_grid_longitude) - ) + attrs.append(("north_pole_grid_longitude", self.north_pole_grid_longitude)) if self.ellipsoid is not None: attrs.append(("ellipsoid", self.ellipsoid)) return attrs def __repr__(self): attrs = self._pretty_attrs() - result = "RotatedGeogCS(%s)" % ", ".join( - ["%s=%r" % (k, v) for k, v in attrs] - ) + result = "RotatedGeogCS(%s)" % ", ".join(["%s=%r" % (k, v) for k, v in attrs]) # Extra prettiness result = result.replace("grid_north_pole_latitude=", "") result = result.replace("grid_north_pole_longitude=", "") @@ -669,14 +655,10 @@ def __init__( """ #: True latitude of planar origin in degrees. - self.latitude_of_projection_origin = float( - latitude_of_projection_origin - ) + self.latitude_of_projection_origin = float(latitude_of_projection_origin) #: True longitude of planar origin in degrees. - self.longitude_of_central_meridian = float( - longitude_of_central_meridian - ) + self.longitude_of_central_meridian = float(longitude_of_central_meridian) #: X offset from planar origin in metres. self.false_easting = _arg_default(false_easting, 0) @@ -784,14 +766,10 @@ def __init__( """ #: True latitude of planar origin in degrees. - self.latitude_of_projection_origin = float( - latitude_of_projection_origin - ) + self.latitude_of_projection_origin = float(latitude_of_projection_origin) #: True longitude of planar origin in degrees. - self.longitude_of_projection_origin = float( - longitude_of_projection_origin - ) + self.longitude_of_projection_origin = float(longitude_of_projection_origin) #: X offset from planar origin in metres. self.false_easting = _arg_default(false_easting, 0) @@ -880,14 +858,10 @@ def __init__( """ #: True latitude of planar origin in degrees. 
- self.latitude_of_projection_origin = float( - latitude_of_projection_origin - ) + self.latitude_of_projection_origin = float(latitude_of_projection_origin) #: True longitude of planar origin in degrees. - self.longitude_of_projection_origin = float( - longitude_of_projection_origin - ) + self.longitude_of_projection_origin = float(longitude_of_projection_origin) #: Altitude of satellite in metres. self.perspective_point_height = float(perspective_point_height) @@ -982,19 +956,14 @@ def __init__( """ #: True latitude of planar origin in degrees. - self.latitude_of_projection_origin = float( - latitude_of_projection_origin - ) + self.latitude_of_projection_origin = float(latitude_of_projection_origin) if self.latitude_of_projection_origin != 0.0: raise ValueError( - "Non-zero latitude of projection currently not" - " supported by Cartopy." + "Non-zero latitude of projection currently not supported by Cartopy." ) #: True longitude of planar origin in degrees. - self.longitude_of_projection_origin = float( - longitude_of_projection_origin - ) + self.longitude_of_projection_origin = float(longitude_of_projection_origin) #: Altitude of satellite in metres. self.perspective_point_height = float(perspective_point_height) @@ -1111,9 +1080,7 @@ def __init__( self.false_northing = _arg_default(false_northing, 0) #: Latitude of true scale. - self.true_scale_lat = _arg_default( - true_scale_lat, None, cast_as=_float_or_None - ) + self.true_scale_lat = _arg_default(true_scale_lat, None, cast_as=_float_or_None) #: Scale factor at projection origin. self.scale_factor_at_projection_origin = _arg_default( scale_factor_at_projection_origin, None, cast_as=_float_or_None @@ -1699,14 +1666,10 @@ def __init__( self.azimuth_of_central_line = float(azimuth_of_central_line) #: True latitude of planar origin in degrees. 
- self.latitude_of_projection_origin = float( - latitude_of_projection_origin - ) + self.latitude_of_projection_origin = float(latitude_of_projection_origin) #: True longitude of planar origin in degrees. - self.longitude_of_projection_origin = float( - longitude_of_projection_origin - ) + self.longitude_of_projection_origin = float(longitude_of_projection_origin) #: X offset from planar origin in metres. self.false_easting = _arg_default(false_easting, 0) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 8af7ee0c8a..3aeef122f2 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -535,9 +535,7 @@ def reindent_data_string(text, n_indent): bounds_text = "bounds: " if "\n" in bounds_array_str: # Put initial '[' here, and the rest on subsequent lines - bounds_text += ( - "[" + newline_indent + indent + bounds_array_str[1:] - ) + bounds_text += "[" + newline_indent + indent + bounds_array_str[1:] else: # All on one line bounds_text += bounds_array_str @@ -758,9 +756,7 @@ def pointwise_convert(values): return old_unit.convert(values, new_unit) if self._has_lazy_values(): - new_values = _lazy.lazy_elementwise( - self._lazy_values(), pointwise_convert - ) + new_values = _lazy.lazy_elementwise(self._lazy_values(), pointwise_convert) else: new_values = self.units.convert(self._values, unit) self._values = new_values @@ -860,17 +856,13 @@ def xml_element(self, doc): element.setAttribute("units", repr(self.units)) if isinstance(self, Coord): if self.climatological: - element.setAttribute( - "climatological", str(self.climatological) - ) + element.setAttribute("climatological", str(self.climatological)) if self.attributes: attributes_element = doc.createElement("attributes") for name in sorted(self.attributes.keys()): attribute_element = doc.createElement("attribute") attribute_element.setAttribute("name", name) - attribute_element.setAttribute( - "value", str(self.attributes[name]) - ) + attribute_element.setAttribute("value", str(self.attributes[name])) 
attributes_element.appendChild(attribute_element) element.appendChild(attributes_element) @@ -981,9 +973,7 @@ def __init__( """ # Configure the metadata manager. if not hasattr(self, "_metadata_manager"): - self._metadata_manager = metadata_manager_factory( - AncillaryVariableMetadata - ) + self._metadata_manager = metadata_manager_factory(AncillaryVariableMetadata) super().__init__( values=data, @@ -1316,7 +1306,7 @@ def __new__(cls, point=None, bound=None): if isinstance(point, (tuple, list)): if len(point) != 1: raise ValueError( - "Point may only be a list or tuple if it has " "length 1." + "Point may only be a list or tuple if it has length 1." ) point = point[0] @@ -1356,9 +1346,7 @@ def __eq__(self, other): compared. """ - if isinstance(other, (int, float, np.number)) or hasattr( - other, "timetuple" - ): + if isinstance(other, (int, float, np.number)) or hasattr(other, "timetuple"): if self.bound is not None: return self.contains_point(other) else: @@ -1403,9 +1391,7 @@ def __common_cmp__(self, other, operator_method): isinstance(other, (int, float, np.number, Cell)) or hasattr(other, "timetuple") ): - raise TypeError( - "Unexpected type of other " "{}.".format(type(other)) - ) + raise TypeError("Unexpected type of other {}.".format(type(other))) if operator_method not in ( operator.gt, operator.lt, @@ -1452,9 +1438,7 @@ def __common_cmp__(self, other, operator_method): if self.bound[1] == other.bound[1]: result = operator_method(self.point, other.point) else: - result = operator_method( - self.bound[1], other.bound[1] - ) + result = operator_method(self.bound[1], other.bound[1]) else: result = operator_method(self.bound[0], other.bound[0]) else: @@ -1618,9 +1602,7 @@ def copy(self, points=None, bounds=None): """ if points is None and bounds is not None: - raise ValueError( - "If bounds are specified, points must also be " "specified" - ) + raise ValueError("If bounds are specified, points must also be specified") new_coord = super().copy(values=points) if 
points is not None: @@ -1694,13 +1676,8 @@ def bounds(self, bounds): else: bounds = self._sanitise_array(bounds, 2) if self.shape != bounds.shape[:-1]: - raise ValueError( - "Bounds shape must be compatible with points " "shape." - ) - if ( - not self.has_bounds() - or self.core_bounds().shape != bounds.shape - ): + raise ValueError("Bounds shape must be compatible with points shape.") + if not self.has_bounds() or self.core_bounds().shape != bounds.shape: # Construct a new bounds DataManager. self._bounds_dm = DataManager(bounds) else: @@ -1983,12 +1960,8 @@ def mod360_adjust(compare_axis): # 3---2 + 3---2 # | | | | # 0---1 + 0---1 - upper_bounds = np.stack( - (bounds[:, :-1, 1], bounds[:, :-1, 2]) - ) - lower_bounds = np.stack( - (bounds[:, 1:, 0], bounds[:, 1:, 3]) - ) + upper_bounds = np.stack((bounds[:, :-1, 1], bounds[:, :-1, 2])) + lower_bounds = np.stack((bounds[:, 1:, 0], bounds[:, 1:, 3])) elif compare_axis == "y": # Extract the pairs of upper bounds and lower bounds which # connect along the "y" axis. These connect along indices @@ -2001,12 +1974,8 @@ def mod360_adjust(compare_axis): # 3---2 # | | # 0---1 - upper_bounds = np.stack( - (bounds[:-1, :, 3], bounds[:-1, :, 2]) - ) - lower_bounds = np.stack( - (bounds[1:, :, 0], bounds[1:, :, 1]) - ) + upper_bounds = np.stack((bounds[:-1, :, 3], bounds[:-1, :, 2])) + lower_bounds = np.stack((bounds[1:, :, 0], bounds[1:, :, 1])) if self.name() in ["longitude", "grid_longitude"]: # If longitude, adjust for longitude wrapping @@ -2125,9 +2094,7 @@ def is_monotonic(self): if self.has_bounds(): for b_index in range(self.nbounds): - if not iris.util.monotonic( - self.bounds[..., b_index], strict=True - ): + if not iris.util.monotonic(self.bounds[..., b_index], strict=True): return False return True @@ -2247,9 +2214,7 @@ def serialize(x): points = serialize(self.points) dtype = np.dtype("U{}".format(len(points))) # Create the new collapsed coordinate. 
- coord = self.copy( - points=np.array(points, dtype=dtype), bounds=bounds - ) + coord = self.copy(points=np.array(points, dtype=dtype), bounds=bounds) else: # Collapse the coordinate by calculating the bounded extremes. if self.ndim > 1: @@ -2343,9 +2308,7 @@ def _guess_bounds(self, bound_position=0.5): raise iris.exceptions.CoordinateMultiDimError(self) if self.shape[0] < 2: - raise ValueError( - "Cannot guess bounds for a coordinate of length " "1." - ) + raise ValueError("Cannot guess bounds for a coordinate of length 1.") if self.has_bounds(): raise ValueError( @@ -2370,10 +2333,7 @@ def _guess_bounds(self, bound_position=0.5): bounds = np.array([min_bounds, max_bounds]).transpose() - if ( - self.name() in ("latitude", "grid_latitude") - and self.units == "degree" - ): + if self.name() in ("latitude", "grid_latitude") and self.units == "degree": points = self.points if (points >= -90).all() and (points <= 90).all(): np.clip(bounds, -90, 90, out=bounds) @@ -2807,9 +2767,7 @@ def collapsed(self, dims_to_collapse=None): bnds = coord.bounds.copy() bnds[0, 1] = coord.bounds[0, 0] + self.units.modulus coord.bounds = bnds - coord.points = np.array( - np.sum(coord.bounds) * 0.5, dtype=self.points.dtype - ) + coord.points = np.array(np.sum(coord.bounds) * 0.5, dtype=self.points.dtype) # XXX This isn't actually correct, but is ported from the old world. coord.circular = False return coord @@ -2904,10 +2862,7 @@ def _new_bounds_requirements(self, bounds): bounds[:, b_index], strict=True, return_direction=True ) if not monotonic: - emsg = ( - "The {!r} {} bounds array must be strictly " - "monotonic." - ) + emsg = "The {!r} {} bounds array must be strictly monotonic." raise ValueError( emsg.format(self.name(), self.__class__.__name__) ) @@ -2918,9 +2873,7 @@ def _new_bounds_requirements(self, bounds): "The direction of monotonicity for {!r} {} must " "be consistent across all bounds." 
) - raise ValueError( - emsg.format(self.name(), self.__class__.__name__) - ) + raise ValueError(emsg.format(self.name(), self.__class__.__name__)) if n_bounds == 2: # Make ordering of bounds consistent with coord's direction @@ -3088,9 +3041,7 @@ def __init__(self, method, coords=None, intervals=None, comments=None): """ if not isinstance(method, str): - raise TypeError( - "'method' must be a string - got a '%s'" % type(method) - ) + raise TypeError("'method' must be a string - got a '%s'" % type(method)) default_name = BaseMetadata.DEFAULT_NAME _coords = [] diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 0018a5abe8..c1f8069195 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -151,9 +151,7 @@ def merged(self, unique=False): duplicate cubes are detected. """ - return _CubeFilterCollection( - [pair.merged(unique) for pair in self.pairs] - ) + return _CubeFilterCollection([pair.merged(unique) for pair in self.pairs]) class CubeList(list): @@ -174,8 +172,7 @@ def __init__(self, *args, **kwargs): def __str__(self): """Runs short :meth:`Cube.summary` on every cube.""" result = [ - "%s: %s" % (i, cube.summary(shorten=True)) - for i, cube in enumerate(self) + "%s: %s" % (i, cube.summary(shorten=True)) for i, cube in enumerate(self) ] if result: result = "\n".join(result) @@ -190,10 +187,7 @@ def __repr__(self): @staticmethod def _assert_is_cube(obj): if not hasattr(obj, "add_aux_coord"): - msg = ( - r"Object {obj} cannot be put in a cubelist, " - "as it is not a Cube." - ) + msg = r"Object {obj} cannot be put in a cubelist, as it is not a Cube." 
raise ValueError(msg) def _repr_html_(self): @@ -345,9 +339,7 @@ def extract_cubes(self, constraints): ) @staticmethod - def _extract_and_merge( - cubes, constraints, strict=False, return_single_cube=False - ): + def _extract_and_merge(cubes, constraints, strict=False, return_single_cube=False): constraints = iris._constraints.list_of_constraints(constraints) # group the resultant cubes by constraints in a dictionary @@ -364,7 +356,7 @@ def _extract_and_merge( for constraint in constraints: constraint_cubes = constraint_groups[constraint] if strict and len(constraint_cubes) != 1: - msg = "Got %s cubes for constraint %r, " "expecting 1." % ( + msg = "Got %s cubes for constraint %r, expecting 1." % ( len(constraint_cubes), constraint, ) @@ -401,15 +393,12 @@ def extract_overlapping(self, coord_names): def make_overlap_fn(coord_name): def overlap_fn(cell): - return all( - cell in cube.coord(coord_name).cells() for cube in self - ) + return all(cell in cube.coord(coord_name).cells() for cube in self) return overlap_fn coord_values = { - coord_name: make_overlap_fn(coord_name) - for coord_name in coord_names + coord_name: make_overlap_fn(coord_name) for coord_name in coord_names } return self.extract(iris.Constraint(coord_values=coord_values)) @@ -617,16 +606,13 @@ def concatenate_cube( msgs = [] msgs.append("An unexpected problem prevented concatenation.") msgs.append( - "Expected only a single cube, " - "found {}.".format(n_res_cubes) + "Expected only a single cube, found {}.".format(n_res_cubes) ) raise iris.exceptions.ConcatenateError(msgs) else: msgs = [] msgs.append( - "Cube names differ: {} != {}".format( - unique_names[0], unique_names[1] - ) + "Cube names differ: {} != {}".format(unique_names[0], unique_names[1]) ) raise iris.exceptions.ConcatenateError(msgs) @@ -1435,9 +1421,7 @@ def _dimensional_metadata(self, name_or_dimensional_metadata): except KeyError: pass if not found_item: - raise KeyError( - f"{name_or_dimensional_metadata} was not found in {self}." 
- ) + raise KeyError(f"{name_or_dimensional_metadata} was not found in {self}.") return found_item def is_compatible(self, other, ignore=None): @@ -1524,9 +1508,7 @@ def convert_units(self, unit): pointwise_convert = partial(old_unit.convert, other=new_unit) - new_data = _lazy.lazy_elementwise( - self.lazy_data(), pointwise_convert - ) + new_data = _lazy.lazy_elementwise(self.lazy_data(), pointwise_convert) else: new_data = self.units.convert(self.data, unit) self.data = new_data @@ -1577,9 +1559,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims): if len(data_dims) != metadata.ndim: msg = ( "Invalid data dimensions: {} given, {} expected for " - "{!r}.".format( - len(data_dims), metadata.ndim, metadata.name() - ) + "{!r}.".format(len(data_dims), metadata.ndim, metadata.name()) ) raise iris.exceptions.CannotAddError(msg) # Check compatibility with the shape of the data @@ -1658,8 +1638,7 @@ def add_aux_factory(self, aux_factory): """ if not isinstance(aux_factory, iris.aux_factory.AuxCoordFactory): raise TypeError( - "Factory must be a subclass of " - "iris.aux_factory.AuxCoordFactory." + "Factory must be a subclass of iris.aux_factory.AuxCoordFactory." ) # Get all 'real' coords (i.e. 
not derived ones) : use private data @@ -1738,12 +1717,8 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): "Duplicate ancillary variables not permitted" ) - data_dims = self._check_multi_dim_metadata( - ancillary_variable, data_dims - ) - self._ancillary_variables_and_dims.append( - (ancillary_variable, data_dims) - ) + data_dims = self._check_multi_dim_metadata(ancillary_variable, data_dims) + self._ancillary_variables_and_dims.append((ancillary_variable, data_dims)) self._ancillary_variables_and_dims.sort( key=lambda av_dims: (av_dims[0].metadata, av_dims[1]) ) @@ -1774,8 +1749,7 @@ def add_dim_coord(self, dim_coord, data_dim): # Check dimension is available if self.coords(dimensions=data_dim, dim_coords=True): raise iris.exceptions.CannotAddError( - "A dim_coord is already associated with " - "dimension %d." % data_dim + "A dim_coord is already associated with dimension %d." % data_dim ) self._add_unique_dim_coord(dim_coord, data_dim) @@ -1789,7 +1763,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): if isinstance(data_dim, Container): if len(data_dim) != 1: raise iris.exceptions.CannotAddError( - "The supplied data dimension must be a" " single number." + "The supplied data dimension must be a single number." 
) data_dim = int(list(data_dim)[0]) else: @@ -1798,8 +1772,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): # Check data_dim value is valid if data_dim < 0 or data_dim >= self.ndim: raise iris.exceptions.CannotAddError( - "The cube does not have the specified dimension " - "(%d)" % data_dim + "The cube does not have the specified dimension (%d)" % data_dim ) # Check compatibility with the shape of the data @@ -1978,9 +1951,7 @@ def matcher(factory): return factory.metadata == target_metadata factories = filter(matcher, self._aux_factories) - matches = [ - factory.derived_dims(self.coord_dims) for factory in factories - ] + matches = [factory.derived_dims(self.coord_dims) for factory in factories] if matches: match = matches[0] @@ -2003,9 +1974,7 @@ def cell_measure_dims(self, cell_measure): # Search for existing cell measure (object) on the cube, faster lookup # than equality - makes no functional difference. matches = [ - dims - for cm_, dims in self._cell_measures_and_dims - if cm_ is cell_measure + dims for cm_, dims in self._cell_measures_and_dims if cm_ is cell_measure ] if not matches: @@ -2039,9 +2008,7 @@ def ancillary_variable_dims(self, ancillary_variable): return matches[0] - def aux_factory( - self, name=None, standard_name=None, long_name=None, var_name=None - ): + def aux_factory(self, name=None, standard_name=None, long_name=None, var_name=None): """ Returns the single coordinate factory that matches the criteria, or raises an error if not found. 
@@ -2070,9 +2037,7 @@ def aux_factory( factories = self.aux_factories if name is not None: - factories = [ - factory for factory in factories if factory.name() == name - ] + factories = [factory for factory in factories if factory.name() == name] if standard_name is not None: factories = [ @@ -2083,16 +2048,12 @@ def aux_factory( if long_name is not None: factories = [ - factory - for factory in factories - if factory.long_name == long_name + factory for factory in factories if factory.long_name == long_name ] if var_name is not None: factories = [ - factory - for factory in factories - if factory.var_name == var_name + factory for factory in factories if factory.var_name == var_name ] if len(factories) > 1: @@ -2105,10 +2066,7 @@ def aux_factory( ) raise iris.exceptions.CoordinateNotFoundError(msg) elif len(factories) == 0: - msg = ( - "Expected to find exactly one coordinate factory, but " - "found none." - ) + msg = "Expected to find exactly one coordinate factory, but found none." raise iris.exceptions.CoordinateNotFoundError(msg) return factories[0] @@ -2218,16 +2176,12 @@ def coords( if mesh_coords: # *only* MeshCoords coords_and_factories = [ - item - for item in coords_and_factories - if hasattr(item, "mesh") + item for item in coords_and_factories if hasattr(item, "mesh") ] else: # *not* MeshCoords coords_and_factories = [ - item - for item in coords_and_factories - if not hasattr(item, "mesh") + item for item in coords_and_factories if not hasattr(item, "mesh") ] coords_and_factories = metadata_filter( @@ -2272,15 +2226,14 @@ def extract_coord(coord_or_factory): elif isinstance(coord_or_factory, iris.coords.Coord): coord = coord_or_factory else: - msg = "Expected Coord or AuxCoordFactory, got " "{!r}.".format( + msg = "Expected Coord or AuxCoordFactory, got {!r}.".format( type(coord_or_factory) ) raise ValueError(msg) return coord coords = [ - extract_coord(coord_or_factory) - for coord_or_factory in coords_and_factories + extract_coord(coord_or_factory) 
for coord_or_factory in coords_and_factories ] return coords @@ -2457,9 +2410,7 @@ def coord_system(self, spec=None): result = None if spec_name is None: - for key in sorted( - coord_systems.keys(), key=lambda class_: class_.__name__ - ): + for key in sorted(coord_systems.keys(), key=lambda class_: class_.__name__): result = coord_systems[key] break else: @@ -2608,9 +2559,7 @@ def cell_measure(self, name_or_cell_measure=None): if isinstance(name_or_cell_measure, str): bad_name = name_or_cell_measure else: - bad_name = ( - name_or_cell_measure and name_or_cell_measure.name() - ) or "" + bad_name = (name_or_cell_measure and name_or_cell_measure.name()) or "" if name_or_cell_measure is not None: emsg = ( "Expected to find exactly 1 cell measure matching the given " @@ -2682,9 +2631,7 @@ def ancillary_variable(self, name_or_ancillary_variable=None): for full keyword documentation. """ - ancillary_variables = self.ancillary_variables( - name_or_ancillary_variable - ) + ancillary_variables = self.ancillary_variables(name_or_ancillary_variable) if len(ancillary_variables) > 1: msg = ( @@ -2701,8 +2648,7 @@ def ancillary_variable(self, name_or_ancillary_variable=None): bad_name = name_or_ancillary_variable else: bad_name = ( - name_or_ancillary_variable - and name_or_ancillary_variable.name() + name_or_ancillary_variable and name_or_ancillary_variable.name() ) or "" if name_or_ancillary_variable is not None: emsg = ( @@ -2937,9 +2883,7 @@ def __str__(self): return self.summary() def __repr__(self): - return "" % self.summary( - shorten=True, name_padding=1 - ) + return "" % self.summary(shorten=True, name_padding=1) def _repr_html_(self): from iris.experimental.representation import CubeRepresentation @@ -2988,9 +2932,7 @@ def new_ancillary_variable_dims(av_): cube_data = self._data_manager.core_data() # Index with the keys, using orthogonal slicing. 
- dimension_mapping, data = iris.util._slice_data_with_keys( - cube_data, keys - ) + dimension_mapping, data = iris.util._slice_data_with_keys(cube_data, keys) # We don't want a view of the data, so take a copy of it. data = deepcopy(data) @@ -2999,10 +2941,7 @@ def new_ancillary_variable_dims(av_): # results in numpy (v1.11.1) *always* returning a MaskedConstant # with a dtype of float64, regardless of the original masked # array dtype! - if ( - isinstance(data, ma.core.MaskedConstant) - and data.dtype != cube_data.dtype - ): + if isinstance(data, ma.core.MaskedConstant) and data.dtype != cube_data.dtype: data = ma.array(data.data, mask=data.mask, dtype=cube_data.dtype) # Make the new cube slice @@ -3015,9 +2954,7 @@ def new_ancillary_variable_dims(av_): # Slice the coords for coord in self.aux_coords: - coord_keys = tuple( - [full_slice[dim] for dim in self.coord_dims(coord)] - ) + coord_keys = tuple([full_slice[dim] for dim in self.coord_dims(coord)]) try: new_coord = coord[coord_keys] except ValueError: @@ -3028,9 +2965,7 @@ def new_ancillary_variable_dims(av_): coord_mapping[id(coord)] = new_coord for coord in self.dim_coords: - coord_keys = tuple( - [full_slice[dim] for dim in self.coord_dims(coord)] - ) + coord_keys = tuple([full_slice[dim] for dim in self.coord_dims(coord)]) new_dims = new_coord_dims(coord) # Try/Catch to handle slicing that makes the points/bounds # non-monotonic @@ -3064,9 +2999,7 @@ def new_ancillary_variable_dims(av_): dims = self.ancillary_variable_dims(ancvar) av_keys = tuple([full_slice[dim] for dim in dims]) new_av = ancvar[av_keys] - cube.add_ancillary_variable( - new_av, new_ancillary_variable_dims(ancvar) - ) + cube.add_ancillary_variable(new_av, new_ancillary_variable_dims(ancvar)) return cube @@ -3084,10 +3017,7 @@ def subset(self, coord): coord_to_extract = self.coord(coord) # If scalar, return the whole cube. Not possible to subset 1 point. 
- if ( - coord_to_extract in self.aux_coords - and len(coord_to_extract.points) == 1 - ): + if coord_to_extract in self.aux_coords and len(coord_to_extract.points) == 1: # Default to returning None result = None @@ -3108,9 +3038,7 @@ def subset(self, coord): # Identify the indices which intersect the requested coord and # coord_to_extract - coord_indices = coord_to_extract.intersect( - coord, return_indices=True - ) + coord_indices = coord_to_extract.intersect(coord, return_indices=True) if coord_indices.size == 0: # No matches found. @@ -3244,9 +3172,7 @@ def _intersect( raise ValueError("minimum greater than maximum") modulus = coord.units.modulus if modulus is None: - raise ValueError( - "coordinate units with no modulus are not yet supported" - ) + raise ValueError("coordinate units with no modulus are not yet supported") subsets, points, bounds = self._intersect_modulus( coord, minimum, @@ -3308,25 +3234,17 @@ def create_coords(src_coords, add_coord): if dim in dims: dim_within_coord = dims.index(dim) points = np.concatenate( - [ - chunk.coord(src_coord).points - for chunk in chunks - ], + [chunk.coord(src_coord).points for chunk in chunks], dim_within_coord, ) if src_coord.has_bounds(): bounds = np.concatenate( - [ - chunk.coord(src_coord).bounds - for chunk in chunks - ], + [chunk.coord(src_coord).bounds for chunk in chunks], dim_within_coord, ) else: bounds = None - result_coord = src_coord.copy( - points=points, bounds=bounds - ) + result_coord = src_coord.copy(points=points, bounds=bounds) circular = getattr(result_coord, "circular", False) if circular and not preserve_circular: @@ -3398,9 +3316,7 @@ def dim_coord_subset(): ) if edge_equal_base_period: points[index_end] = coord.points[index_end] - subsets = [ - slice(inside_indices[0], inside_indices[-1] + 1) - ] + subsets = [slice(inside_indices[0], inside_indices[-1] + 1)] # Either no edge wrap or edge wrap != base + period # i.e. 
derive subset without alteration @@ -3439,9 +3355,7 @@ def _intersect_modulus( ): modulus = coord.units.modulus if maximum > minimum + modulus: - raise ValueError( - "requested range greater than coordinate's unit's modulus" - ) + raise ValueError("requested range greater than coordinate's unit's modulus") if coord.has_bounds(): values = coord.bounds else: @@ -3468,9 +3382,7 @@ def _intersect_modulus( # Check points only (inside_indices,) = np.where( - np.logical_and( - min_comp(minimum, points), max_comp(points, maximum) - ) + np.logical_and(min_comp(minimum, points), max_comp(points, maximum)) ) else: @@ -3514,18 +3426,14 @@ def _intersect_modulus( upper = bounds[iupper] lower = bounds[ilower] overlap = np.where( - np.logical_and( - min_comp(minimum, upper), max_comp(lower, maximum) - ), + np.logical_and(min_comp(minimum, upper), max_comp(lower, maximum)), np.minimum(maximum, upper) - np.maximum(minimum, lower), np.nan, ) (inside_indices,) = np.where(overlap >= thresholds) # Determine the subsets - subsets = self._intersect_derive_subset( - coord, points, bounds, inside_indices - ) + subsets = self._intersect_derive_subset(coord, points, bounds, inside_indices) return subsets, points, bounds def _as_list_of_coords(self, names_or_coords): @@ -3668,8 +3576,7 @@ def slices(self, ref_to_slice, ordered=True): dim = int(ref) except ValueError: raise ValueError( - "{} Incompatible type {} for " - "slicing".format(ref, type(ref)) + "{} Incompatible type {} for slicing".format(ref, type(ref)) ) if dim < 0 or dim > self.ndim: msg = ( @@ -3837,9 +3744,7 @@ def dimmeta_xml_element(element, typename, dimscall): cms_xml_element = doc.createElement("cellMeasures") for cm in cell_measures: cms_xml_element.appendChild( - dimmeta_xml_element( - cm, "cell-measure", self.cell_measure_dims - ) + dimmeta_xml_element(cm, "cell-measure", self.cell_measure_dims) ) cube_xml_element.appendChild(cms_xml_element) @@ -3879,14 +3784,10 @@ def normalise(data): # sensitive to unused numbers. 
Use a fixed value so # a change in fill_value doesn't affect the # checksum. - crc = "0x%08x" % ( - zlib.crc32(normalise(data.filled(0))) & 0xFFFFFFFF, - ) + crc = "0x%08x" % (zlib.crc32(normalise(data.filled(0))) & 0xFFFFFFFF,) data_xml_element.setAttribute("checksum", crc) if ma.is_masked(data): - crc = "0x%08x" % ( - zlib.crc32(normalise(data.mask)) & 0xFFFFFFFF, - ) + crc = "0x%08x" % (zlib.crc32(normalise(data.mask)) & 0xFFFFFFFF,) else: crc = "no-masked-elements" data_xml_element.setAttribute("mask_checksum", crc) @@ -3952,9 +3853,7 @@ def copy(self, data=None): def __copy__(self): """Shallow copying is disallowed for Cubes.""" - raise copy.Error( - "Cube shallow-copy not allowed. Use deepcopy() or " "Cube.copy()" - ) + raise copy.Error("Cube shallow-copy not allowed. Use deepcopy() or Cube.copy()") def __deepcopy__(self, memo): return self._deepcopy(memo) @@ -3964,9 +3863,7 @@ def _deepcopy(self, memo, data=None): new_dim_coords_and_dims = deepcopy(self._dim_coords_and_dims, memo) new_aux_coords_and_dims = deepcopy(self._aux_coords_and_dims, memo) - new_cell_measures_and_dims = deepcopy( - self._cell_measures_and_dims, memo - ) + new_cell_measures_and_dims = deepcopy(self._cell_measures_and_dims, memo) new_ancillary_variables_and_dims = deepcopy( self._ancillary_variables_and_dims, memo ) @@ -4009,8 +3906,8 @@ def __eq__(self, other): # having checked the metadata, now check the coordinates if result: - coord_compares = ( - iris.analysis._dimensional_metadata_comparison(self, other) + coord_compares = iris.analysis._dimensional_metadata_comparison( + self, other ) # if there are any coordinates which are not equal result = not ( @@ -4024,8 +3921,7 @@ def __eq__(self, other): ) # if there are any cell measures which are not equal result = not ( - cm_compares["not_equal"] - or cm_compares["non_equal_data_dimension"] + cm_compares["not_equal"] or cm_compares["non_equal_data_dimension"] ) if result: @@ -4034,17 +3930,14 @@ def __eq__(self, other): ) # if there 
are any ancillary variables which are not equal result = not ( - av_compares["not_equal"] - or av_compares["non_equal_data_dimension"] + av_compares["not_equal"] or av_compares["non_equal_data_dimension"] ) # Having checked everything else, check approximate data equality. if result: # TODO: why do we use allclose() here, but strict equality in # _DimensionalMetadata (via util.array_equal())? - result = da.allclose( - self.core_data(), other.core_data() - ).compute() + result = da.allclose(self.core_data(), other.core_data()).compute() return result # Must supply __ne__, Python does not defer to __eq__ for negative equality @@ -4223,9 +4116,7 @@ def collapsed(self, coords, aggregator, **kwargs): aggregator, iris.analysis.WeightedAggregator ) and not aggregator.uses_weighting(**kwargs): msg = "Collapsing spatial coordinate {!r} without weighting" - lat_match = [ - coord for coord in coords if "latitude" in coord.name() - ] + lat_match = [coord for coord in coords if "latitude" in coord.name()] if lat_match: for coord in lat_match: warnings.warn( @@ -4235,14 +4126,10 @@ def collapsed(self, coords, aggregator, **kwargs): # Determine the dimensions we need to collapse (and those we don't) if aggregator.cell_method == "peak": - dims_to_collapse = [ - list(self.coord_dims(coord)) for coord in coords - ] + dims_to_collapse = [list(self.coord_dims(coord)) for coord in coords] # Remove duplicate dimensions. - new_dims = OrderedDict.fromkeys( - d for dim in dims_to_collapse for d in dim - ) + new_dims = OrderedDict.fromkeys(d for dim in dims_to_collapse for d in dim) # Reverse the dimensions so the order can be maintained when # reshaping the data. dims_to_collapse = list(new_dims)[::-1] @@ -4256,17 +4143,12 @@ def collapsed(self, coords, aggregator, **kwargs): raise ValueError(msg) if not dims_to_collapse: - msg = ( - "Cannot collapse a dimension which does not describe any " - "data." - ) + msg = "Cannot collapse a dimension which does not describe any data." 
raise iris.exceptions.CoordinateCollapseError(msg) untouched_dims = set(range(self.ndim)) - set(dims_to_collapse) - collapsed_cube = iris.util._strip_metadata_from_dims( - self, dims_to_collapse - ) + collapsed_cube = iris.util._strip_metadata_from_dims(self, dims_to_collapse) # Remove the collapsed dimension(s) from the metadata indices = [slice(None, None)] * self.ndim @@ -4301,14 +4183,10 @@ def collapsed(self, coords, aggregator, **kwargs): new_shape = untouched_shape + collapsed_shape array_dims = untouched_dims + dims_to_collapse - unrolled_data = np.transpose(self.data, array_dims).reshape( - new_shape - ) + unrolled_data = np.transpose(self.data, array_dims).reshape(new_shape) for dim in dims_to_collapse: - unrolled_data = aggregator.aggregate( - unrolled_data, axis=-1, **kwargs - ) + unrolled_data = aggregator.aggregate(unrolled_data, axis=-1, **kwargs) data_result = unrolled_data # Perform the aggregation in lazy form if possible. @@ -4353,13 +4231,9 @@ def collapsed(self, coords, aggregator, **kwargs): if weights is not None and weights.ndim > 1: # Note: *don't* adjust 1d weights arrays, these have a special meaning for statistics functions. 
weights = weights.view() - kwargs["weights"] = np.transpose(weights, dims).reshape( - new_shape - ) + kwargs["weights"] = np.transpose(weights, dims).reshape(new_shape) - data_result = aggregator.aggregate( - unrolled_data, axis=-1, **kwargs - ) + data_result = aggregator.aggregate(unrolled_data, axis=-1, **kwargs) aggregator.update_metadata( collapsed_cube, @@ -4368,14 +4242,10 @@ def collapsed(self, coords, aggregator, **kwargs): _weights_units=getattr(weights_info, "units", None), **kwargs, ) - result = aggregator.post_process( - collapsed_cube, data_result, coords, **kwargs - ) + result = aggregator.post_process(collapsed_cube, data_result, coords, **kwargs) return result - def aggregated_by( - self, coords, aggregator, climatological=False, **kwargs - ): + def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): """ Perform aggregation over the cube given one or more "group coordinates". @@ -4574,9 +4444,7 @@ def aggregated_by( # Create data and weights slices. front_slice = (slice(None),) * dimension_to_groupby - back_slice = (slice(None),) * ( - len(data_shape) - dimension_to_groupby - 1 - ) + back_slice = (slice(None),) * (len(data_shape) - dimension_to_groupby - 1) groupby_subarrs = map( lambda groupby_slice: iris.util._slice_data_with_keys( @@ -4608,17 +4476,13 @@ def aggregated_by( # before combining the different slices. if return_weights: result, weights_result = list(zip(*result)) - aggregateby_weights = stack( - weights_result, axis=dimension_to_groupby - ) + aggregateby_weights = stack(weights_result, axis=dimension_to_groupby) else: aggregateby_weights = None aggregateby_data = stack(result, axis=dimension_to_groupby) # Ensure plain ndarray is output if plain ndarray was input. 
- if ma.isMaskedArray(aggregateby_data) and not ma.isMaskedArray( - input_data - ): + if ma.isMaskedArray(aggregateby_data) and not ma.isMaskedArray(input_data): aggregateby_data = ma.getdata(aggregateby_data) # Add the aggregation meta data to the aggregate-by cube. @@ -4653,9 +4517,7 @@ def aggregated_by( ): aggregateby_cube.add_dim_coord(new_coord, dimension_to_groupby) else: - aggregateby_cube.add_aux_coord( - new_coord, self.coord_dims(lookup_coord) - ) + aggregateby_cube.add_aux_coord(new_coord, self.coord_dims(lookup_coord)) coord_mapping[id(self.coord(lookup_coord))] = new_coord aggregateby_cube._aux_factories = [] @@ -4780,8 +4642,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): if window < 2: raise ValueError( - "Cannot perform rolling window " - "with a window size less than 2." + "Cannot perform rolling window with a window size less than 2." ) if coord.ndim > 1: @@ -4836,9 +4697,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): # are the serialized form of the points contributing to each # window and the bounds are the first and last points in the # window as with numeric coordinates. - new_points = np.apply_along_axis( - lambda x: "|".join(x), -1, new_bounds - ) + new_points = np.apply_along_axis(lambda x: "|".join(x), -1, new_bounds) new_bounds = new_bounds[:, (0, -1)] else: # Take the first and last element of the rolled window (i.e. 
@@ -4880,9 +4739,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): data_result = aggregator.aggregate( rolling_window_data, axis=dimension + 1, **kwargs ) - result = aggregator.post_process( - new_cube, data_result, [coord], **kwargs - ) + result = aggregator.post_process(new_cube, data_result, [coord], **kwargs) return result def interpolate(self, sample_points, scheme, collapse_scalar=True): @@ -5025,9 +4882,7 @@ class ClassDict(MutableMapping): def __init__(self, superclass): if not isinstance(superclass, type): - raise TypeError( - "The superclass must be a Python type or new " "style class." - ) + raise TypeError("The superclass must be a Python type or new style class.") self._superclass = superclass self._basic_map = {} self._retrieval_map = {} diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index c3e6b6193f..ea788e7c18 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -127,8 +127,7 @@ def __init__(self, differences): def __str__(self): return "\n ".join( - ["failed to concatenate into a single cube."] - + list(self.differences) + ["failed to concatenate into a single cube."] + list(self.differences) ) diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 5cc3b4f710..7f824df690 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -41,9 +41,7 @@ } -def _gdal_write_array( - x_min, x_step, y_max, y_step, coord_system, data, fname, ftype -): +def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, ftype): """ Use GDAL WriteArray to export data as a 32-bit raster image. 
Requires the array data to be of the form: North-at-top @@ -137,9 +135,7 @@ def export_geotiff(cube, fname): coord_y = cube.coord(axis="Y", dim_coords=True) if coord_x.bounds is None or coord_y.bounds is None: - raise ValueError( - "Coordinates must have bounds, consider using " "guess_bounds()" - ) + raise ValueError("Coordinates must have bounds, consider using guess_bounds()") if ( coord_x is None @@ -152,9 +148,7 @@ def export_geotiff(cube, fname): for coord in [coord_x, coord_y]: name = coord.name() if coord.nbounds != 2: - msg = "Coordinate {!r} must have two bounds " "per point.".format( - name - ) + msg = "Coordinate {!r} must have two bounds per point.".format(name) raise ValueError(msg) if not ( coord.units == cf_units.Unit("degrees") @@ -165,20 +159,15 @@ def export_geotiff(cube, fname): "convertible to meters.".format(name) ) if not coord.is_contiguous(): - raise ValueError( - "Coordinate {!r} bounds must be " "contiguous.".format(name) - ) + raise ValueError("Coordinate {!r} bounds must be contiguous.".format(name)) xy_step.append(np.diff(coord.bounds[0])) if not np.allclose(np.diff(coord.bounds), xy_step[-1]): - msg = "Coordinate {!r} bounds must be regularly " "spaced.".format( - name - ) + msg = "Coordinate {!r} bounds must be regularly spaced.".format(name) raise ValueError(msg) if coord_x.points[0] > coord_x.points[-1]: raise ValueError( - "Coordinate {!r} x-points must be monotonically" - "increasing.".format(name) + "Coordinate {!r} x-points must be monotonically increasing.".format(name) ) data = cube.data @@ -205,6 +194,4 @@ def export_geotiff(cube, fname): x_min = np.min(x_bounds) y_max = np.max(coord_y.bounds) - _gdal_write_array( - x_min, x_step, y_max, y_step, coord_system, data, fname, "GTiff" - ) + _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, "GTiff") diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index f35a483b01..372fec7a9f 100644 --- a/lib/iris/experimental/regrid.py +++ 
b/lib/iris/experimental/regrid.py @@ -30,11 +30,7 @@ _regrid_area_weighted_rectilinear_src_and_grid__perform, _regrid_area_weighted_rectilinear_src_and_grid__prepare, ) -from iris.analysis._interpolation import ( - get_xy_coords, - get_xy_dim_coords, - snapshot_grid, -) +from iris.analysis._interpolation import get_xy_coords, get_xy_dim_coords, snapshot_grid from iris.analysis._regrid import ( _regrid_weighted_curvilinear_to_rectilinear__perform, _regrid_weighted_curvilinear_to_rectilinear__prepare, @@ -55,9 +51,7 @@ warn_deprecated(wmsg) -def regrid_area_weighted_rectilinear_src_and_grid( - src_cube, grid_cube, mdtol=0 -): +def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): """ Return a new cube with data values calculated using the area weighted mean of data values from src_grid regridded onto the horizontal grid of @@ -205,9 +199,7 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): regrid_info = _regrid_weighted_curvilinear_to_rectilinear__prepare( src_cube, weights, grid_cube ) - result = _regrid_weighted_curvilinear_to_rectilinear__perform( - src_cube, regrid_info - ) + result = _regrid_weighted_curvilinear_to_rectilinear__perform(src_cube, regrid_info) return result @@ -368,9 +360,7 @@ def _regrid( tgt_projection = tgt_x_coord.coord_system.as_cartopy_projection() tgt_x, tgt_y = _meshgrid(tgt_x_coord.points, tgt_y_coord.points) - projected_tgt_grid = projection.transform_points( - tgt_projection, tgt_x, tgt_y - ) + projected_tgt_grid = projection.transform_points(tgt_projection, tgt_x, tgt_y) # Prepare the result data array. 
# XXX TODO: Deal with masked src_data @@ -392,9 +382,7 @@ def _regrid( src_index[xy_dim] = slice(None) src_subset = src_data[tuple(src_index)] tgt_index = ( - index[:xy_dim] - + (slice(None), slice(None)) - + index[xy_dim + 1 :] + index[:xy_dim] + (slice(None), slice(None)) + index[xy_dim + 1 :] ) data[tgt_index] = scipy.interpolate.griddata( projected_src_points[..., :2], @@ -576,8 +564,7 @@ def __call__(self, src_cube): ) if src_cs is None: raise ValueError( - "'src' lateral geographic coordinates have " - "no coordinate system." + "'src' lateral geographic coordinates have no coordinate system." ) # Check the source grid units. @@ -589,8 +576,7 @@ def __call__(self, src_cube): if src_x_dim != src_y_dim: raise ValueError( - "'src' lateral geographic coordinates should map " - "the same dimension." + "'src' lateral geographic coordinates should map the same dimension." ) src_xy_dim = src_x_dim diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index ccea4277d3..a06aba986e 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -54,9 +54,7 @@ def _convert_latlons(crs, x_array, y_array): return ll_values[..., 0], ll_values[..., 1] -def _make_esmpy_field( - x_coord, y_coord, ref_name="field", data=None, mask=None -): +def _make_esmpy_field(x_coord, y_coord, ref_name="field", data=None, mask=None): """ Create an ESMPy ESMF.Field on given coordinates. 
diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 785bf43e63..bc6e02f4b8 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -104,8 +104,7 @@ def __init__(self, cube): "Attributes:", ] self.sections_data = { - name: None - for name in self.vector_section_names + self.scalar_section_names + name: None for name in self.vector_section_names + self.scalar_section_names } # 'Scalar-cell-measures' is currently alone amongst the scalar sections, # in displaying only a 'name' and no 'value' field. @@ -134,9 +133,7 @@ def _get_dim_names(self): # Add the dim_coord names that participate in the associated data # dimensions. for dim in range(len(self.cube.shape)): - dim_coords = self.cube.coords( - contains_dimension=dim, dim_coords=True - ) + dim_coords = self.cube.coords(contains_dimension=dim, dim_coords=True) if dim_coords: dim_names[dim] = dim_coords[0].name() else: @@ -191,28 +188,20 @@ def _make_header(self): """ # Header row. - tlc_template = ( - '{self.name} ({self.units})' - ) + tlc_template = '{self.name} ({self.units})' top_left_cell = tlc_template.format(self=self) cells = ['', top_left_cell] for dim_name in self.names: - cells.append( - '{}'.format(dim_name) - ) + cells.append('{}'.format(dim_name)) cells.append("") return "\n".join(cell for cell in cells) def _make_shapes_row(self): """Add a row to show data / dimensions shape.""" - title_cell = ( - 'Shape' - ) + title_cell = 'Shape' cells = ['', title_cell] for shape in self.shapes: - cells.append( - '{}'.format(shape) - ) + cells.append('{}'.format(shape)) cells.append("") return "\n".join(cell for cell in cells) @@ -244,9 +233,7 @@ def _make_row(self, title, body=None, col_span=0): ) # Add blank cells for the rest of the rows. 
for _ in range(self.ndims): - row.append( - template.format(html_cls=' class="iris-title"', content="") - ) + row.append(template.format(html_cls=' class="iris-title"', content="")) else: # This is not a title row. # Deal with name of coord/attr etc. first. @@ -259,9 +246,7 @@ def _make_row(self, title, body=None, col_span=0): ) # One further item or more than that? if col_span != 0: - html_cls = ' class="{}" colspan="{}"'.format( - "iris-word-cell", col_span - ) + html_cls = ' class="{}" colspan="{}"'.format("iris-word-cell", col_span) row.append(template.format(html_cls=html_cls, content=body)) else: # "Inclusion" - `x` or `-`. @@ -298,9 +283,7 @@ def _make_content(self): title = line[:split_point].strip() body = line[split_point + 2 :].strip() - elements.extend( - self._make_row(title, body=body, col_span=colspan) - ) + elements.extend(self._make_row(title, body=body, col_span=colspan)) return "\n".join(element for element in elements) def repr_html(self): @@ -402,9 +385,7 @@ def __init__(self, cubelist): def make_content(self): html = [] for i, cube in enumerate(self.cubelist): - title = "{i}: {summary}".format( - i=i, summary=cube.summary(shorten=True) - ) + title = "{i}: {summary}".format(i=i, summary=cube.summary(shorten=True)) title = escape(title) content = cube._repr_html_() html.append( @@ -417,6 +398,4 @@ def make_content(self): def repr_html(self): contents = self.make_content() contents_str = "\n".join(contents) - return self._template.format( - uid=self.cubelist_id, contents=contents_str - ) + return self._template.format(uid=self.cubelist_id, contents=contents_str) diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index 604fda38a3..d3ba6bfecb 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -127,10 +127,7 @@ def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): try: cube_data, src_data = np.broadcast_arrays(cube.data, src_data) except ValueError: - emsg = ( 
- "Cannot broadcast the cube and src_levels with " - "shapes {} and {}." - ) + emsg = "Cannot broadcast the cube and src_levels with shapes {} and {}." raise ValueError(emsg.format(cube.shape, src_data.shape)) tgt_levels = np.asarray(tgt_levels) diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index ba365aeb1f..959548870e 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -65,9 +65,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"{nc_var_name}" ) if warn: - warnings.warn( - message, category=IrisCfMissingVarWarning - ) + warnings.warn(message, category=IrisCfMissingVarWarning) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -148,9 +146,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): # Restrict to non-string type i.e. not a # CFLabelVariable. if not cf._is_str_dtype(variables[name]): - result[ - name - ] = CFUGridAuxiliaryCoordinateVariable( + result[name] = CFUGridAuxiliaryCoordinateVariable( name, variables[name] ) else: @@ -204,9 +200,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): # We are looking for all mesh variables. Check if THIS variable # is a mesh using its own attributes. if getattr(nc_var, "cf_role", "") == "mesh_topology": - result[nc_var_name] = CFUGridMeshVariable( - nc_var_name, nc_var - ) + result[nc_var_name] = CFUGridMeshVariable(nc_var_name, nc_var) # Check for mesh variable references. nc_var_att = getattr(nc_var, cls.cf_identity, None) @@ -221,16 +215,12 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"referenced by netCDF variable {nc_var_name}" ) if warn: - warnings.warn( - message, category=IrisCfMissingVarWarning - ) + warnings.warn(message, category=IrisCfMissingVarWarning) else: # Restrict to non-string type i.e. not a # CFLabelVariable. 
if not cf._is_str_dtype(variables[name]): - result[name] = CFUGridMeshVariable( - name, variables[name] - ) + result[name] = CFUGridMeshVariable(name, variables[name]) else: message = ( f"Ignoring variable {name}, identified as a " @@ -238,9 +228,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"variable." ) if warn: - warnings.warn( - message, category=IrisCfLabelVarWarning - ) + warnings.warn(message, category=IrisCfLabelVarWarning) return result diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index c2a4b0c563..001ef0bb9b 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -18,11 +18,7 @@ from ...config import get_logger from ...coords import AuxCoord -from ...exceptions import ( - IrisCfWarning, - IrisDefaultingWarning, - IrisIgnoringWarning, -) +from ...exceptions import IrisCfWarning, IrisDefaultingWarning, IrisIgnoringWarning from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names from ...fileformats.netcdf import loader as nc_loader from ...io import decode_uri, expand_filespecs @@ -45,9 +41,7 @@ class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): pass -class _WarnComboCfDefaultingIgnoring( - _WarnComboCfDefaulting, IrisIgnoringWarning -): +class _WarnComboCfDefaultingIgnoring(_WarnComboCfDefaulting, IrisIgnoringWarning): """One-off combination of warning classes - enhances user filtering.""" pass @@ -148,9 +142,7 @@ def load_mesh(uris, var_name=None): result = set([mesh for file in meshes_result.values() for mesh in file]) mesh_count = len(result) if mesh_count != 1: - message = ( - f"Expecting 1 mesh, but input file(s) produced: {mesh_count} ." - ) + message = f"Expecting 1 mesh, but input file(s) produced: {mesh_count} ." 
raise ValueError(message) return result.pop() # Return the single element @@ -214,9 +206,7 @@ def load_meshes(uris, var_name=None): for source in sources: if scheme == "file": with open(source, "rb") as fh: - handling_format_spec = FORMAT_AGENT.get_spec( - Path(source).name, fh - ) + handling_format_spec = FORMAT_AGENT.get_spec(Path(source).name, fh) else: handling_format_spec = FORMAT_AGENT.get_spec(source, None) @@ -325,9 +315,7 @@ def _build_connectivity(connectivity_var, file_path, element_dims): else: location_axis = 0 - standard_name, long_name, var_name = get_names( - connectivity_var, None, attributes - ) + standard_name, long_name, var_name = get_names(connectivity_var, None, attributes) connectivity = Connectivity( indices=indices_data, @@ -364,9 +352,7 @@ def _build_mesh(cf, mesh_var, file_path): else: cf_role = getattr(mesh_var, "cf_role") if cf_role != "mesh_topology": - cf_role_message = ( - f"{mesh_var.cf_name} has an inappropriate cf_role: {cf_role}." - ) + cf_role_message = f"{mesh_var.cf_name} has an inappropriate cf_role: {cf_role}." if cf_role_message: cf_role_message += " Correcting to 'mesh_topology'." warnings.warn( @@ -421,13 +407,9 @@ def _build_mesh(cf, mesh_var, file_path): if coord.var_name in mesh_var.node_coordinates.split(): node_coord_args.append(coord_and_axis) node_dimension = coord_var.dimensions[0] - elif ( - coord.var_name in getattr(mesh_var, "edge_coordinates", "").split() - ): + elif coord.var_name in getattr(mesh_var, "edge_coordinates", "").split(): edge_coord_args.append(coord_and_axis) - elif ( - coord.var_name in getattr(mesh_var, "face_coordinates", "").split() - ): + elif coord.var_name in getattr(mesh_var, "face_coordinates", "").split(): face_coord_args.append(coord_and_axis) # TODO: support volume_coordinates. 
else: @@ -438,10 +420,7 @@ def _build_mesh(cf, mesh_var, file_path): raise ValueError(message) if node_dimension is None: - message = ( - "'node_dimension' could not be identified from mesh node " - "coordinates." - ) + message = "'node_dimension' could not be identified from mesh node coordinates." raise ValueError(message) # Used for detecting transposed connectivities. @@ -479,14 +458,10 @@ def _build_mesh(cf, mesh_var, file_path): face_dimension=face_dimension, ) - mesh_elements = ( - list(mesh.all_coords) + list(mesh.all_connectivities) + [mesh] - ) + mesh_elements = list(mesh.all_coords) + list(mesh.all_connectivities) + [mesh] mesh_elements = filter(None, mesh_elements) for iris_object in mesh_elements: - nc_loader._add_unused_attributes( - iris_object, cf.cf_group[iris_object.var_name] - ) + nc_loader._add_unused_attributes(iris_object, cf.cf_group[iris_object.var_name]) return mesh diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 68d208d867..cd5a440cd0 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -19,11 +19,7 @@ import numpy as np from ... import _lazy_data as _lazy -from ...common import ( - CFVariableMixin, - metadata_filter, - metadata_manager_factory, -) +from ...common import CFVariableMixin, metadata_filter, metadata_manager_factory from ...common.metadata import BaseMetadata from ...config import get_logger from ...coords import AuxCoord, _DimensionalMetadata @@ -55,9 +51,7 @@ # #: Namedtuple for 1D mesh :class:`~iris.coords.AuxCoord` coordinates. -Mesh1DCoords = namedtuple( - "Mesh1DCoords", ["node_x", "node_y", "edge_x", "edge_y"] -) +Mesh1DCoords = namedtuple("Mesh1DCoords", ["node_x", "node_y", "edge_x", "edge_y"]) #: Namedtuple for 2D mesh :class:`~iris.coords.AuxCoord` coordinates. 
Mesh2DCoords = namedtuple( "Mesh2DCoords", @@ -372,9 +366,7 @@ def indices_error(message): indices_shape = indices.shape if len(indices_shape) != 2: - indices_error( - f"Expected 2-dimensional shape, got: shape={indices_shape} ." - ) + indices_error(f"Expected 2-dimensional shape, got: shape={indices_shape} .") len_req_fail = False if shapes_only: @@ -667,26 +659,18 @@ def normalise(element, axis): # check the UGRID minimum requirement for coordinates if "node_x" not in kwargs: - emsg = ( - "Require a node coordinate that is x-axis like to be provided." - ) + emsg = "Require a node coordinate that is x-axis like to be provided." raise ValueError(emsg) if "node_y" not in kwargs: - emsg = ( - "Require a node coordinate that is y-axis like to be provided." - ) + emsg = "Require a node coordinate that is y-axis like to be provided." raise ValueError(emsg) if self.topology_dimension == 1: self._coord_manager = _Mesh1DCoordinateManager(**kwargs) - self._connectivity_manager = _Mesh1DConnectivityManager( - *connectivities - ) + self._connectivity_manager = _Mesh1DConnectivityManager(*connectivities) elif self.topology_dimension == 2: self._coord_manager = _Mesh2DCoordinateManager(**kwargs) - self._connectivity_manager = _Mesh2DConnectivityManager( - *connectivities - ) + self._connectivity_manager = _Mesh2DConnectivityManager(*connectivities) else: emsg = f"Unsupported 'topology_dimension', got {topology_dimension!r}." raise NotImplementedError(emsg) @@ -799,16 +783,11 @@ def check_shape(array_name): attr_name = f"core_{array_name}" arrays = [getattr(coord, attr_name)() for coord in coords] if any(a is None for a in arrays): - message = ( - f"{array_name} missing from coords[{arrays.index(None)}] ." - ) + message = f"{array_name} missing from coords[{arrays.index(None)}] ." raise ValueError(message) shapes = [array.shape for array in arrays] if shapes.count(shapes[0]) != len(shapes): - message = ( - f"{array_name} shapes are not identical for all " - f"coords." 
- ) + message = f"{array_name} shapes are not identical for all " f"coords." raise ValueError(message) for array in ("points", "bounds"): @@ -826,8 +805,7 @@ def check_shape(array_name): bounds_dim1 = bounds_shape[1] if bounds_dim1 < 2: message = ( - f"Expected coordinate bounds.shape (n, >" - f"=2), got: {bounds_shape} ." + f"Expected coordinate bounds.shape (n, >" f"=2), got: {bounds_shape} ." ) raise ValueError(message) elif bounds_dim1 == 2: @@ -859,15 +837,11 @@ def check_shape(array_name): units=coord.units, attributes=coord.attributes, ) - node_points = array_lib.ma.filled( - coord.core_bounds(), 0.0 - ).flatten() + node_points = array_lib.ma.filled(coord.core_bounds(), 0.0).flatten() node_coords.append(AuxCoord(points=node_points, **coord_kwargs)) centre_points = coord.core_points() - centre_coords.append( - AuxCoord(points=centre_points, **coord_kwargs) - ) + centre_coords.append(AuxCoord(points=centre_points, **coord_kwargs)) ##### # TODO: remove axis assignment once Mesh supports arbitrary coords. 
@@ -898,9 +872,7 @@ def axes_assign(coord_list): node_coords_and_axes=node_coords_and_axes, connectivities=[connectivity], ) - mesh_kwargs[ - f"{coord_centring}_coords_and_axes" - ] = centre_coords_and_axes + mesh_kwargs[f"{coord_centring}_coords_and_axes"] = centre_coords_and_axes return cls(**mesh_kwargs) def __eq__(self, other): @@ -1006,9 +978,7 @@ def line(text, i_indent=0): line(f"{dim_name}: '{dim}'", 2) # Print defining connectivity (except node) if element != "node": - main_conn_string = main_conn.summary( - shorten=True, linewidth=0 - ) + main_conn_string = main_conn.summary(shorten=True, linewidth=0) line(f"{main_conn_name}: {main_conn_string}", 2) # Print coords include_key = f"include_{element}s" @@ -1027,9 +997,7 @@ def line(text, i_indent=0): "face_edge_connectivity", "edge_face_connectivity", ) - optional_conns = [ - getattr(self, name, None) for name in optional_conn_names - ] + optional_conns = [getattr(self, name, None) for name in optional_conn_names] optional_conns = { name: conn for conn, name in zip(optional_conns, optional_conn_names) @@ -1086,9 +1054,7 @@ def _set_dimension_names(self, node, edge, face, reset=False): ) zipped = zip(args, currents) if reset: - node, edge, face = [ - None if arg else current for arg, current in zipped - ] + node, edge, face = [None if arg else current for arg, current in zipped] else: node, edge, face = [arg or current for arg, current in zipped] @@ -1103,9 +1069,7 @@ def _set_dimension_names(self, node, edge, face, reset=False): self.node_dimension, self.edge_dimension, self.face_dimension ) else: - message = ( - f"Unsupported topology_dimension: {self.topology_dimension} ." - ) + message = f"Unsupported topology_dimension: {self.topology_dimension} ." 
raise NotImplementedError(message) return result @@ -1968,9 +1932,7 @@ def to_MeshCoords(self, location): """ # factory method - result = [ - self.to_MeshCoord(location=location, axis=ax) for ax in self.AXES - ] + result = [self.to_MeshCoord(location=location, axis=ax) for ax in self.AXES] return tuple(result) def dimension_names_reset(self, node=False, edge=False, face=False): @@ -2085,11 +2047,7 @@ def __ne__(self, other): return result def __repr__(self): - args = [ - f"{member}={coord!r}" - for member, coord in self - if coord is not None - ] + args = [f"{member}={coord!r}" for member, coord in self if coord is not None] return f"{self.__class__.__name__}({', '.join(args)})" def __setstate__(self, state): @@ -2119,9 +2077,7 @@ def _setter(self, element, axis, coord, shape): # enforce the UGRID minimum coordinate requirement if element == "node" and coord is None: - emsg = ( - f"{member!r} is a required coordinate, cannot set to 'None'." - ) + emsg = f"{member!r} is a required coordinate, cannot set to 'None'." raise ValueError(emsg) if coord is not None: @@ -2140,7 +2096,9 @@ def _setter(self, element, axis, coord, shape): raise TypeError(emsg) if shape is not None and coord.shape != shape: - emsg = f"{member!r} requires to have shape {shape!r}, got {coord.shape!r}." + emsg = ( + f"{member!r} requires to have shape {shape!r}, got {coord.shape!r}." 
+ ) raise ValueError(emsg) self._members[member] = coord @@ -2176,9 +2134,7 @@ def edge_x(self): @edge_x.setter def edge_x(self, coord): - self._setter( - element="edge", axis="x", coord=coord, shape=self._edge_shape - ) + self._setter(element="edge", axis="x", coord=coord, shape=self._edge_shape) @property def edge_y(self): @@ -2186,9 +2142,7 @@ def edge_y(self): @edge_y.setter def edge_y(self, coord): - self._setter( - element="edge", axis="y", coord=coord, shape=self._edge_shape - ) + self._setter(element="edge", axis="y", coord=coord, shape=self._edge_shape) @property def node_coords(self): @@ -2200,9 +2154,7 @@ def node_x(self): @node_x.setter def node_x(self, coord): - self._setter( - element="node", axis="x", coord=coord, shape=self._node_shape - ) + self._setter(element="node", axis="x", coord=coord, shape=self._node_shape) @property def node_y(self): @@ -2210,9 +2162,7 @@ def node_y(self): @node_y.setter def node_y(self, coord): - self._setter( - element="node", axis="y", coord=coord, shape=self._node_shape - ) + self._setter(element="node", axis="y", coord=coord, shape=self._node_shape) def _add(self, coords): member_x, member_y = coords._fields @@ -2254,9 +2204,7 @@ def filter(self, **kwargs): result = self.filters(**kwargs) if len(result) > 1: - names = ", ".join( - f"{member}={coord!r}" for member, coord in result.items() - ) + names = ", ".join(f"{member}={coord!r}" for member, coord in result.items()) emsg = ( f"Expected to find exactly 1 coordinate, but found {len(result)}. " f"They were: {names}." @@ -2276,9 +2224,7 @@ def filter(self, **kwargs): or None ) name = "" if name is None else f"{name!r} " - emsg = ( - f"Expected to find exactly 1 {name}coordinate, but found none." - ) + emsg = f"Expected to find exactly 1 {name}coordinate, but found none." raise CoordinateNotFoundError(emsg) return result @@ -2334,9 +2280,7 @@ def populated_coords(coords_tuple): # Use the results to filter the _members dict for returning. 
result_ids = [id(r) for r in result] - result_dict = { - k: v for k, v in self._members.items() if id(v) in result_ids - } + result_dict = {k: v for k, v in self._members.items() if id(v) in result_ids} return result_dict def remove( @@ -2403,9 +2347,7 @@ def face_x(self): @face_x.setter def face_x(self, coord): - self._setter( - element="face", axis="x", coord=coord, shape=self._face_shape - ) + self._setter(element="face", axis="x", coord=coord, shape=self._face_shape) @property def face_y(self): @@ -2413,9 +2355,7 @@ def face_y(self): @face_y.setter def face_y(self, coord): - self._setter( - element="face", axis="y", coord=coord, shape=self._face_shape - ) + self._setter(element="face", axis="y", coord=coord, shape=self._face_shape) def add( self, @@ -2500,9 +2440,7 @@ def __setstate__(self, state): def __str__(self): args = [ - f"{member}" - for member, connectivity in self - if connectivity is not None + f"{member}" for member, connectivity in self if connectivity is not None ] return f"{self.__class__.__name__}({', '.join(args)})" @@ -2534,9 +2472,7 @@ def add(self, *connectivities): # Validate shapes. 
proposed_members = {**self._members, **add_dict} - elements = set( - [c.location for c in proposed_members.values() if c is not None] - ) + elements = set([c.location for c in proposed_members.values() if c is not None]) for element in elements: counts = [ len(c.indices_by_location(c.lazy_indices())) @@ -2558,8 +2494,7 @@ def filter(self, **kwargs): result = self.filters(**kwargs) if len(result) > 1: names = ", ".join( - f"{member}={connectivity!r}" - for member, connectivity in result.items() + f"{member}={connectivity!r}" for member, connectivity in result.items() ) message = ( f"Expected to find exactly 1 connectivity, but found " @@ -2572,9 +2507,7 @@ def filter(self, **kwargs): if item is not None: if not isinstance(item, str): _name = item.name() - bad_name = ( - _name or kwargs["standard_name"] or kwargs["long_name"] or "" - ) + bad_name = _name or kwargs["standard_name"] or kwargs["long_name"] or "" message = ( f"Expected to find exactly 1 {bad_name} connectivity, " f"but found none." @@ -2598,9 +2531,7 @@ def filters( members = [c for c in self._members.values() if c is not None] if cf_role is not None: - members = [ - instance for instance in members if instance.cf_role == cf_role - ] + members = [instance for instance in members if instance.cf_role == cf_role] def element_filter(instances, loc_arg, loc_name): if loc_arg is False: @@ -2636,9 +2567,7 @@ def element_filter(instances, loc_arg, loc_name): # any face cf-roles if none are present. supports_faces = any(["face" in role for role in self.ALL]) if contains_face and not supports_faces: - message = ( - "Ignoring request to filter for non-existent 'face' cf-roles." - ) + message = "Ignoring request to filter for non-existent 'face' cf-roles." logger.debug(message, extra=dict(cls=self.__class__.__name__)) result = metadata_filter( @@ -2652,9 +2581,7 @@ def element_filter(instances, loc_arg, loc_name): # Use the results to filter the _members dict for returning. 
result_ids = [id(r) for r in result] - result_dict = { - k: v for k, v in self._members.items() if id(v) in result_ids - } + result_dict = {k: v for k, v in self._members.items() if id(v) in result_ids} return result_dict def remove( @@ -2973,9 +2900,7 @@ def copy(self, points=None, bounds=None): # Make a new MeshCoord with the same args : The Mesh is the *same* # as the original (not a copy). - new_coord = MeshCoord( - mesh=self.mesh, location=self.location, axis=self.axis - ) + new_coord = MeshCoord(mesh=self.mesh, location=self.location, axis=self.axis) return new_coord def __deepcopy__(self, memo): @@ -3108,9 +3033,7 @@ def _construct_access_arrays(self): node_points = node_coord.core_points() n_nodes = node_points.shape[0] # Choose real/lazy array library, to suit array types. - lazy = _lazy.is_lazy_data(indices) or _lazy.is_lazy_data( - node_points - ) + lazy = _lazy.is_lazy_data(indices) or _lazy.is_lazy_data(node_points) al = da if lazy else np # NOTE: Dask cannot index with a multidimensional array, so we # must flatten it and restore the shape later. @@ -3120,9 +3043,7 @@ def _construct_access_arrays(self): # with "safe" index values, and post-mask the results. flat_inds_nomask = al.ma.filled(flat_inds, -1) # Note: *also* mask any places where the index is out of range. - missing_inds = (flat_inds_nomask < 0) | ( - flat_inds_nomask >= n_nodes - ) + missing_inds = (flat_inds_nomask < 0) | (flat_inds_nomask >= n_nodes) flat_inds_safe = al.where(missing_inds, 0, flat_inds_nomask) # Here's the core indexing operation. # The comma applies all inds-array values to the *first* dimension. 
diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index bfdcc7e114..231803fd74 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -88,10 +88,7 @@ def _compare_lenient(self, other): ConnectivityMetadata._members, ) result = all( - [ - getattr(self, field) == getattr(other, field) - for field in members - ] + [getattr(self, field) == getattr(other, field) for field in members] ) if result: # Perform lenient comparison of the other parent members. @@ -330,10 +327,7 @@ def _compare_lenient(self, other): # Perform "strict" comparison for the MeshCoord specific members # 'location', 'axis' : for equality, they must all match. result = all( - [ - getattr(self, field) == getattr(other, field) - for field in self._members - ] + [getattr(self, field) == getattr(other, field) for field in self._members] ) if result: # Perform lenient comparison of the other parent members. diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index f09740d98c..d10a967014 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -49,15 +49,11 @@ def save_mesh(mesh, filename, netcdf_format="NETCDF4"): mesh_dimensions, _ = sman._get_dim_names(mesh) # Create dimensions. - sman._create_cf_dimensions( - cube=None, dimension_names=mesh_dimensions - ) + sman._create_cf_dimensions(cube=None, dimension_names=mesh_dimensions) # Create the mesh components. sman._add_mesh(mesh) # Add a conventions attribute. # TODO: add 'UGRID' to conventions, when this is agreed with CF ? 
- sman.update_global_attributes( - Conventions=netcdf.CF_CONVENTIONS_VERSION - ) + sman.update_global_attributes(Conventions=netcdf.CF_CONVENTIONS_VERSION) diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index 05e60c3ce7..fae14687aa 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -97,10 +97,7 @@ def recombine_submeshes( result_dtype = None indexcoord_metadata = None for i_sub, cube in enumerate(submesh_cubes): - sub_str = ( - f"Submesh cube #{i_sub + 1}/{len(submesh_cubes)}, " - f'"{cube.name()}"' - ) + sub_str = f"Submesh cube #{i_sub + 1}/{len(submesh_cubes)}, " f'"{cube.name()}"' # Check dimensionality. if cube.ndim != mesh_cube.ndim: @@ -146,9 +143,7 @@ def recombine_submeshes( ) else: # non-mesh dims : look for dim-coords (only) - full_coord = mesh_cube.coords( - dim_coords=True, dimensions=(i_dim,) - ) + full_coord = mesh_cube.coords(dim_coords=True, dimensions=(i_dim,)) sub_coord = cube.coords(dim_coords=True, dimensions=(i_dim,)) if full_coord: @@ -235,8 +230,7 @@ def transposed_copy(cube, dim_order): mesh_cube = transposed_copy(mesh_cube, tranpose_dims) submesh_cubes = [ - transposed_copy(region_cube, tranpose_dims) - for region_cube in submesh_cubes + transposed_copy(region_cube, tranpose_dims) for region_cube in submesh_cubes ] # Also prepare for transforming the output back to the original order diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index b74b420802..0854b46151 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -65,8 +65,7 @@ def _load_grib(*args, **kwargs): from iris_grib import load_cubes except ImportError: raise RuntimeError( - "Unable to load GRIB file - " - '"iris_grib" package is not installed.' + "Unable to load GRIB file - " '"iris_grib" package is not installed.' 
) return load_cubes(*args, **kwargs) @@ -142,8 +141,7 @@ def _load_grib(*args, **kwargs): "NetCDF dataset", DataSourceObjectProtocol(), lambda object: all( - hasattr(object, x) - for x in ("variables", "dimensions", "groups", "ncattrs") + hasattr(object, x) for x in ("variables", "dimensions", "groups", "ncattrs") ), # Note: this uses the same call as the above "NetCDF_v4" (and "NetCDF OPeNDAP") # The handler itself needs to detect what is passed + handle it appropriately. @@ -195,7 +193,7 @@ def _load_grib(*args, **kwargs): FORMAT_AGENT.add_spec( FormatSpecification( - "UM Fieldsfile (FF) converted " "with ieee to 32 bit", + "UM Fieldsfile (FF) converted with ieee to 32 bit", MagicNumber(4), 0x00000014, um.load_cubes_32bit_ieee, @@ -207,7 +205,7 @@ def _load_grib(*args, **kwargs): FORMAT_AGENT.add_spec( FormatSpecification( - "UM Fieldsfile (FF) ancillary " "converted with ieee to 32 bit", + "UM Fieldsfile (FF) ancillary converted with ieee to 32 bit", MagicNumber(4), 0xFFFF8000, um.load_cubes_32bit_ieee, @@ -251,14 +249,10 @@ def load_cubes_abf_abl(*args, **kwargs): FORMAT_AGENT.add_spec( - FormatSpecification( - "ABF", FileExtension(), ".abf", load_cubes_abf_abl, priority=3 - ) + FormatSpecification("ABF", FileExtension(), ".abf", load_cubes_abf_abl, priority=3) ) FORMAT_AGENT.add_spec( - FormatSpecification( - "ABL", FileExtension(), ".abl", load_cubes_abf_abl, priority=3 - ) + FormatSpecification("ABL", FileExtension(), ".abl", load_cubes_abf_abl, priority=3) ) diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 76df5d5718..741ca626a9 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -458,9 +458,7 @@ class FF2PP: """ - def __init__( - self, filename, read_data=False, word_depth=DEFAULT_FF_WORD_DEPTH - ): + def __init__(self, filename, read_data=False, word_depth=DEFAULT_FF_WORD_DEPTH): """ Create a FieldsFile to Post Process instance that returns a generator of PPFields contained within the FieldsFile. 
@@ -534,9 +532,7 @@ def _payload(self, field): boundary_width = pack_dims.x_halo + pack_dims.rim_width y_height, x_width = field.lbrow, field.lbnpt mid_height = y_height - 2 * boundary_height - data_words = ( - boundary_height * x_width * 2 + boundary_width * mid_height * 2 - ) + data_words = boundary_height * x_width * 2 + boundary_width * mid_height * 2 data_depth = data_words * word_depth return data_depth, data_type @@ -826,9 +822,7 @@ def _extract_field(self): "Input field skipped as PPField creation failed :" " error = {!r}" ) - warnings.warn( - msg.format(str(valerr)), category=IrisLoadWarning - ) + warnings.warn(msg.format(str(valerr)), category=IrisLoadWarning) def __iter__(self): return pp._interpret_fields(self._extract_field()) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 7db15d21ac..d0fdd0e273 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -286,9 +286,7 @@ def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_({coord_type})" - coord_grid_class, coord_name = _COORDTYPE_GRIDTYPES_AND_COORDNAMES[ - coord_type - ] + coord_grid_class, coord_name = _COORDTYPE_GRIDTYPES_AND_COORDNAMES[coord_type] if coord_grid_class is None: # Coordinates not identified with a specific grid-type class (latlon, # rotated or projected) are always built, but can have no coord-system. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index ec7a28777b..7be5f9ed63 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -144,6 +144,4 @@ def add_fact(self, fact_name, fact_arglist): A shorthand form used only by the new 'actions' routines. 
""" - self.add_case_specific_fact( - fact_name=fact_name, fact_arglist=fact_arglist - ) + self.add_case_specific_fact(fact_name=fact_name, fact_arglist=fact_arglist) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 7044b3a993..1b75594c2a 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -315,12 +315,8 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: nc_cell_method_str = nc_cell_methods[start_ind:end_ind] nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) if not nc_cell_method_match: - msg = ( - f"Failed to fully parse cell method string: {nc_cell_methods}" - ) - warnings.warn( - msg, category=iris.exceptions.IrisCfLoadWarning, stacklevel=2 - ) + msg = f"Failed to fully parse cell method string: {nc_cell_methods}" + warnings.warn(msg, category=iris.exceptions.IrisCfLoadWarning, stacklevel=2) continue nc_cell_methods_matches.append(nc_cell_method_match) @@ -383,12 +379,8 @@ def parse_cell_methods(nc_cell_methods): # # tokenise the key words and field colon marker # - d[_CM_EXTRA] = d[_CM_EXTRA].replace( - "comment:", "<><<:>>" - ) - d[_CM_EXTRA] = d[_CM_EXTRA].replace( - "interval:", "<><<:>>" - ) + d[_CM_EXTRA] = d[_CM_EXTRA].replace("comment:", "<><<:>>") + d[_CM_EXTRA] = d[_CM_EXTRA].replace("interval:", "<><<:>>") d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>") if len(d[_CM_EXTRA]) == 1: comment.extend(d[_CM_EXTRA]) @@ -499,9 +491,7 @@ def _get_ellipsoid(cf_grid_var): """ major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) - inverse_flattening = getattr( - cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None - ) + inverse_flattening = getattr(cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None) # Avoid over-specification exception. 
if major is not None and minor is not None: @@ -557,21 +547,15 @@ def build_rotated_coordinate_system(engine, cf_grid_var): """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) - north_pole_latitude = getattr( - cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0 - ) - north_pole_longitude = getattr( - cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0 - ) + north_pole_latitude = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0) + north_pole_longitude = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0) if north_pole_latitude is None or north_pole_longitude is None: warnings.warn( "Rotated pole position is not fully specified", category=iris.exceptions.IrisCfLoadWarning, ) - north_pole_grid_lon = getattr( - cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 - ) + north_pole_grid_lon = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0) rcs = iris.coord_systems.RotatedGeogCS( north_pole_latitude, @@ -644,9 +628,7 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): ) false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) + standard_parallel = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) cs = iris.coord_systems.LambertConformal( latitude_of_projection_origin, @@ -743,9 +725,7 @@ def build_mercator_coordinate_system(engine, cf_grid_var): longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) + standard_parallel = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) scale_factor_at_projection_origin = getattr( @@ 
-810,9 +790,7 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): ) false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - standard_parallels = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) + standard_parallels = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) cs = iris.coord_systems.AlbersEqualArea( latitude_of_projection_origin, @@ -879,9 +857,7 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): ) false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - sweep_angle_axis = getattr( - cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None - ) + sweep_angle_axis = getattr(cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None) cs = iris.coord_systems.Geostationary( latitude_of_projection_origin, @@ -905,9 +881,7 @@ def build_oblique_mercator_coordinate_system(engine, cf_grid_var): """ ellipsoid = _get_ellipsoid(cf_grid_var) - azimuth_of_central_line = getattr( - cf_grid_var, CF_ATTR_GRID_AZIMUTH_CENT_LINE, None - ) + azimuth_of_central_line = getattr(cf_grid_var, CF_ATTR_GRID_AZIMUTH_CENT_LINE, None) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None ) @@ -1079,9 +1053,7 @@ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): """ - vertex_dim_names = set(cf_bounds_var.dimensions).difference( - cf_coord_var.dimensions - ) + vertex_dim_names = set(cf_bounds_var.dimensions).difference(cf_coord_var.dimensions) if len(vertex_dim_names) != 1: msg = ( "Too many dimension names differ between coordinate " @@ -1096,9 +1068,7 @@ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): ) ) vertex_dim = cf_bounds_var.dimensions.index(*vertex_dim_names) - bounds_data = np.rollaxis( - bounds_data.view(), vertex_dim, len(bounds_data.shape) - ) + bounds_data = 
np.rollaxis(bounds_data.view(), vertex_dim, len(bounds_data.shape)) return bounds_data @@ -1139,9 +1109,7 @@ def build_dimension_coordinate( # the last one. Test based on shape to support different # dimension names. if cf_bounds_var.shape[:-1] != cf_coord_var.shape: - bounds_data = reorder_bounds_data( - bounds_data, cf_bounds_var, cf_coord_var - ) + bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) else: bounds_data = None @@ -1160,22 +1128,16 @@ def build_dimension_coordinate( # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. - common_dims = [ - dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions - ] + common_dims = [dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions] data_dims = None if common_dims: # Calculate the offset of each common dimension. data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names( - cf_coord_var, coord_name, attributes - ) + standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes) - coord_skipped_msg = ( - f"{cf_coord_var.cf_name} coordinate not added to Cube: " - ) + coord_skipped_msg = f"{cf_coord_var.cf_name} coordinate not added to Cube: " coord_skipped_msg += "{error}" coord_skipped = False @@ -1273,26 +1235,20 @@ def build_auxiliary_coordinate( # Resolving the data to a numpy array (i.e. *not* masked) for # compatibility with array creators (i.e. dask) bounds_data = np.asarray(bounds_data) - bounds_data = reorder_bounds_data( - bounds_data, cf_bounds_var, cf_coord_var - ) + bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) else: bounds_data = None # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. 
- common_dims = [ - dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions - ] + common_dims = [dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions] data_dims = None if common_dims: # Calculate the offset of each common dimension. data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names( - cf_coord_var, coord_name, attributes - ) + standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes) # Create the coordinate coord = iris.coords.AuxCoord( @@ -1336,9 +1292,7 @@ def build_cell_measures(engine, cf_cm_var): # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. - common_dims = [ - dim for dim in cf_cm_var.dimensions if dim in cf_var.dimensions - ] + common_dims = [dim for dim in cf_cm_var.dimensions if dim in cf_var.dimensions] data_dims = None if common_dims: # Calculate the offset of each common dimension. @@ -1372,9 +1326,7 @@ def build_cell_measures(engine, cf_cm_var): ) else: # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts["cell_measures"].append( - (cell_measure, cf_cm_var.cf_name) - ) + engine.cube_parts["cell_measures"].append((cell_measure, cf_cm_var.cf_name)) ################################################################################ @@ -1392,9 +1344,7 @@ def build_ancil_var(engine, cf_av_var): # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the AV being built. - common_dims = [ - dim for dim in cf_av_var.dimensions if dim in cf_var.dimensions - ] + common_dims = [dim for dim in cf_av_var.dimensions if dim in cf_var.dimensions] data_dims = None if common_dims: # Calculate the offset of each common dimension. 
@@ -1424,15 +1374,11 @@ def build_ancil_var(engine, cf_av_var): ) else: # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts["ancillary_variables"].append( - (av, cf_av_var.cf_name) - ) + engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name)) ################################################################################ -def _is_lat_lon( - cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes -): +def _is_lat_lon(cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes): """ Determine whether the CF coordinate variable is a latitude/longitude variable. @@ -1625,17 +1571,12 @@ def has_supported_mercator_parameters(engine, cf_name): is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) + standard_parallel = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) scale_factor_at_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) - if ( - scale_factor_at_projection_origin is not None - and standard_parallel is not None - ): + if scale_factor_at_projection_origin is not None and standard_parallel is not None: warnings.warn( "It does not make sense to provide both " '"scale_factor_at_projection_origin" and "standard_parallel".', @@ -1658,27 +1599,19 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None ) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None - ) + standard_parallel = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) scale_factor_at_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) - if ( - latitude_of_projection_origin != 90 - and latitude_of_projection_origin != -90 - ): + if latitude_of_projection_origin != 90 and latitude_of_projection_origin != -90: warnings.warn( 
'"latitude_of_projection_origin" must be +90 or -90.', category=iris.exceptions.IrisCfInvalidCoordParamWarning, ) is_valid = False - if ( - scale_factor_at_projection_origin is not None - and standard_parallel is not None - ): + if scale_factor_at_projection_origin is not None and standard_parallel is not None: warnings.warn( "It does not make sense to provide both " '"scale_factor_at_projection_origin" and "standard_parallel".', diff --git a/lib/iris/fileformats/_pp_lbproc_pairs.py b/lib/iris/fileformats/_pp_lbproc_pairs.py index 86a5f9381d..f9cc9445c9 100644 --- a/lib/iris/fileformats/_pp_lbproc_pairs.py +++ b/lib/iris/fileformats/_pp_lbproc_pairs.py @@ -30,8 +30,5 @@ # lbproc_map is dict mapping lbproc->English and English->lbproc # essentially a one to one mapping LBPROC_MAP = { - x: y - for x, y in itertools.chain( - LBPROC_PAIRS, ((y, x) for x, y in LBPROC_PAIRS) - ) + x: y for x, y in itertools.chain(LBPROC_PAIRS, ((y, x) for x, y in LBPROC_PAIRS)) } diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index 031a5e7483..ca7638a052 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -55,9 +55,7 @@ class _UnstructuredArrayException(Exception): """ -class ArrayStructure( - namedtuple("ArrayStructure", ["stride", "unique_ordered_values"]) -): +class ArrayStructure(namedtuple("ArrayStructure", ["stride", "unique_ordered_values"])): """ Represents the identified structure of an array, where stride is the step between each unique value being seen in order in the flattened @@ -113,9 +111,7 @@ def __eq__(self, other): result = NotImplemented if stride is not None or arr is not None: - result = stride == self.stride and np.all( - self.unique_ordered_values == arr - ) + result = stride == self.stride and np.all(self.unique_ordered_values == arr) return result def __ne__(self, other): @@ -159,9 +155,7 @@ def 
nd_array_and_dims(self, original_array, target_shape, order="c"): """ if original_array.shape[0] != np.prod(target_shape): - raise ValueError( - "Original array and target shape do not " "match up." - ) + raise ValueError("Original array and target shape do not match up.") stride_product = 1 result = None @@ -182,15 +176,12 @@ def nd_array_and_dims(self, original_array, target_shape, order="c"): # given shape? If so, reshape it back to the target shape, # then index out any dimensions which are constant. if self.stride == stride_product and length == self.size: - vector = original_array.reshape( - target_shape + (-1,), order=order - ) + vector = original_array.reshape(target_shape + (-1,), order=order) # Reduce the dimensionality to a 1d array by indexing # everything but this dimension. vector = vector[ tuple( - 0 if dim != i else slice(None) - for i in range(len(target_shape)) + 0 if dim != i else slice(None) for i in range(len(target_shape)) ) ] # Remove any trailing dimension if it is trivial. @@ -416,9 +407,7 @@ def filter_strides_of_length(length): # If we are to build another dimension on top of this possible # structure, we need to compute the stride that would be # needed for that dimension. - next_stride = np.prod( - [struct.size for (_, struct) in potential] - ) + next_stride = np.prod([struct.size for (_, struct) in potential]) # If we've found a structure whose product is the length of # the fields of this Group, we've got a valid potential. 
@@ -460,8 +449,7 @@ def __str__(self): for structure in self.possible_structures(): sizes = ( - "{}: {}".format(name, arr_struct.size) - for name, arr_struct in structure + "{}: {}".format(name, arr_struct.size) for name, arr_struct in structure ) result.append(" ({})".format("; ".join(sizes))) diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 26a1f307b2..677945dac3 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -80,8 +80,7 @@ def __init__(self, filename): basename = os.path.basename(filename) if len(basename) != 24: raise ValueError( - "ABFField expects a filename of 24 characters: " - "{}".format(basename) + "ABFField expects a filename of 24 characters: {}".format(basename) ) self._filename = filename @@ -167,7 +166,7 @@ def to_cube(self): end = calendar.monthrange(self.year, self.month)[1] else: raise iris.exceptions.TranslationError( - "Unknown period: " "{}".format(self.period) + "Unknown period: {}".format(self.period) ) start = datetime.date(year=self.year, month=self.month, day=start) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 86960003db..1d0fb5e6af 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -42,9 +42,7 @@ # NetCDF variable attributes handled by the netCDF4 module and # therefore automatically classed as "used" attributes. -_CF_ATTRS_IGNORE = set( - ["_FillValue", "add_offset", "missing_value", "scale_factor"] -) +_CF_ATTRS_IGNORE = set(["_FillValue", "add_offset", "missing_value", "scale_factor"]) #: Supported dimensionless vertical coordinate reference surface/phemomenon #: formula terms. Ref: [CF] Appendix D. 
@@ -104,8 +102,7 @@ def _identify_common(variables, ignore, target): elif isinstance(target, str): if target not in variables: raise ValueError( - "Cannot identify unknown target CF-netCDF variable %r" - % target + "Cannot identify unknown target CF-netCDF variable %r" % target ) target = {target: variables[target]} else: @@ -195,9 +192,7 @@ def __repr__(self): def cf_attrs(self): """Return a list of all attribute name and value pairs of the CF-netCDF variable.""" - return tuple( - (attr, self.getncattr(attr)) for attr in sorted(self._nc_attrs) - ) + return tuple((attr, self.getncattr(attr)) for attr in sorted(self._nc_attrs)) def cf_attrs_ignored(self): """Return a list of all ignored attribute name and value pairs of the CF-netCDF variable.""" @@ -208,9 +203,7 @@ def cf_attrs_ignored(self): def cf_attrs_used(self): """Return a list of all accessed attribute name and value pairs of the CF-netCDF variable.""" - return tuple( - (attr, self.getncattr(attr)) for attr in sorted(self._cf_attrs) - ) + return tuple((attr, self.getncattr(attr)) for attr in sorted(self._cf_attrs)) def cf_attrs_unused(self): """Return a list of all non-accessed attribute name and value pairs of the CF-netCDF variable.""" @@ -380,9 +373,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): category=iris.exceptions.IrisCfMissingVarWarning, ) else: - result[name] = CFBoundaryVariable( - name, variables[name] - ) + result[name] = CFBoundaryVariable(name, variables[name]) return result @@ -409,9 +400,9 @@ def spans(self, cf_variable): source = self.dimensions target = cf_variable.dimensions # Ignore the bounds extent dimension. 
- result = set(source[:-1]).issubset(target) or set( - source[1:] - ).issubset(target) + result = set(source[:-1]).issubset(target) or set(source[1:]).issubset( + target + ) return result @@ -455,9 +446,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): category=iris.exceptions.IrisCfMissingVarWarning, ) else: - result[name] = CFClimatologyVariable( - name, variables[name] - ) + result[name] = CFClimatologyVariable(name, variables[name]) return result @@ -484,9 +473,9 @@ def spans(self, cf_variable): source = self.dimensions target = cf_variable.dimensions # Ignore the climatology extent dimension. - result = set(source[:-1]).issubset(target) or set( - source[1:] - ).issubset(target) + result = set(source[:-1]).issubset(target) or set(source[1:]).issubset( + target + ) return result @@ -505,9 +494,7 @@ class CFCoordinateVariable(CFVariable): """ @classmethod - def identify( - cls, variables, ignore=None, target=None, warn=True, monotonic=False - ): + def identify(cls, variables, ignore=None, target=None, warn=True, monotonic=False): result = {} ignore, target = cls._identify_common(variables, ignore, target) @@ -532,9 +519,7 @@ def identify( or nc_var.shape == (1,) or iris.util.monotonic(data) ): - result[nc_var_name] = CFCoordinateVariable( - nc_var_name, nc_var - ) + result[nc_var_name] = CFCoordinateVariable(nc_var_name, nc_var) else: result[nc_var_name] = CFCoordinateVariable(nc_var_name, nc_var) @@ -599,9 +584,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): ) else: if variable_name not in result: - result[ - variable_name - ] = _CFFormulaTermsVariable( + result[variable_name] = _CFFormulaTermsVariable( variable_name, variables[variable_name], nc_var_name, @@ -664,9 +647,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): category=iris.exceptions.IrisCfMissingVarWarning, ) else: - result[name] = CFGridMappingVariable( - name, variables[name] - ) + result[name] = CFGridMappingVariable(name, 
variables[name]) return result @@ -831,9 +812,9 @@ def spans(self, cf_variable): source = self.dimensions target = cf_variable.dimensions # Ignore label string length dimension. - result = set(source[:-1]).issubset(target) or set( - source[1:] - ).issubset(target) + result = set(source[:-1]).issubset(target) or set(source[1:]).issubset( + target + ) return result @@ -1017,9 +998,7 @@ def __setitem__(self, name, variable): def __getitem__(self, name): if name not in self._cf_variables: - raise KeyError( - "Cannot get unknown CF-netCDF variable name %r" % str(name) - ) + raise KeyError("Cannot get unknown CF-netCDF variable name %r" % str(name)) return self._cf_variables[name] @@ -1069,9 +1048,7 @@ def __init__(self, file_source, warn=False, monotonic=False): if isinstance(file_source, str): # Create from filepath : open it + own it (=close when we die). self._filename = os.path.expanduser(file_source) - self._dataset = _thread_safe_nc.DatasetWrapper( - self._filename, mode="r" - ) + self._dataset = _thread_safe_nc.DatasetWrapper(self._filename, mode="r") self._own_file = True else: # We have been passed an open dataset. @@ -1155,9 +1132,7 @@ def _translate(self): self.cf_group.global_attributes.update(attr_dict) # Identify and register all CF formula terms. - formula_terms = _CFFormulaTermsVariable.identify( - self._dataset.variables - ) + formula_terms = _CFFormulaTermsVariable.identify(self._dataset.variables) for cf_var in formula_terms.values(): for cf_root, cf_term in cf_var.cf_terms_by_root.items(): @@ -1176,9 +1151,7 @@ def _translate(self): ) for name in data_variable_names: - self.cf_group[name] = CFDataVariable( - name, self._dataset.variables[name] - ) + self.cf_group[name] = CFDataVariable(name, self._dataset.variables[name]) def _build_cf_groups(self): """Build the first order relationships between CF-netCDF variables.""" @@ -1239,9 +1212,7 @@ def _build(cf_variable): # Build CF data variable relationships. 
if isinstance(cf_variable, CFDataVariable): # Add global netCDF attributes. - cf_group.global_attributes.update( - self.cf_group.global_attributes - ) + cf_group.global_attributes.update(self.cf_group.global_attributes) # Add appropriate "dimensioned" CF coordinate variables. cf_group.update( { @@ -1262,10 +1233,7 @@ def _build(cf_variable): # Add appropriate formula terms. for cf_var in self.cf_group.formula_terms.values(): for cf_root in cf_var.cf_terms_by_root: - if ( - cf_root in cf_group - and cf_var.cf_name not in cf_group - ): + if cf_root in cf_group and cf_var.cf_name not in cf_group: # Sanity check dimensionality. if cf_var.spans(cf_variable): cf_group[cf_var.cf_name] = cf_var @@ -1312,10 +1280,7 @@ def _build(cf_variable): if isinstance(terms, str) or not isinstance(terms, Iterable): terms = [terms] cf_var_name = cf_var.cf_name - if ( - cf_term in terms - and cf_var_name not in self.cf_group.promoted - ): + if cf_term in terms and cf_var_name not in self.cf_group.promoted: data_var = CFDataVariable(cf_var_name, cf_var.cf_data) self.cf_group.promoted[cf_var_name] = data_var _build(data_var) @@ -1329,9 +1294,7 @@ def _build(cf_variable): cf_name not in self.cf_group.data_variables and cf_name not in self.cf_group.promoted ): - data_var = CFDataVariable( - cf_name, self.cf_group[cf_name].cf_data - ) + data_var = CFDataVariable(cf_name, self.cf_group[cf_name].cf_data) self.cf_group.promoted[cf_name] = data_var _build(data_var) # Determine whether there are still any ignored variables diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index e3a4493fe8..04fd96ee38 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -41,9 +41,7 @@ def _dot_path(): if not os.path.isabs(path): try: # Check PATH - subprocess.check_output( - [path, "-V"], stderr=subprocess.STDOUT - ) + subprocess.check_output([path, "-V"], stderr=subprocess.STDOUT) except (OSError, subprocess.CalledProcessError): path = None else: @@ -122,20 +120,15 @@ 
def save_png(source, target, launch=False): # Create png data if not _dot_path(): raise ValueError( - 'Executable "dot" not found: ' - "Review dot_path setting in site.cfg." + 'Executable "dot" not found: ' "Review dot_path setting in site.cfg." ) # To filename or open file handle? if isinstance(target, str): - subprocess.call( - [_dot_path(), "-T", "png", "-o", target, dot_file_path] - ) + subprocess.call([_dot_path(), "-T", "png", "-o", target, dot_file_path]) elif hasattr(target, "write"): if hasattr(target, "mode") and "b" not in target.mode: raise ValueError("Target not binary") - subprocess.call( - [_dot_path(), "-T", "png", dot_file_path], stdout=target - ) + subprocess.call([_dot_path(), "-T", "png", dot_file_path], stdout=target) else: raise ValueError("Can only write dot png for a filename or writable") @@ -149,8 +142,7 @@ def save_png(source, target, launch=False): subprocess.call(("firefox", target)) else: raise iris.exceptions.NotYetImplementedError( - "Unhandled operating system. The image has been created in %s" - % target + "Unhandled operating system. The image has been created in %s" % target ) # Remove the dot file if we created it @@ -223,8 +215,9 @@ def cube_text(cube): # Are there any relationships to data dimensions? 
dims = cube.coord_dims(coord) for dim in dims: - relationships_association += ( - '\n "%s" -> "CubeDimension_%s":w' % (coord_label, dim) + relationships_association += '\n "%s" -> "CubeDimension_%s":w' % ( + coord_label, + dim, ) dimension_nodes += """ @@ -275,9 +268,7 @@ def cube_text(cube): %(associations)s } """ - cube_attributes = list( - sorted(cube.attributes.items(), key=lambda item: item[0]) - ) + cube_attributes = list(sorted(cube.attributes.items(), key=lambda item: item[0])) cube_node = _dot_node(_GRAPH_INDENT, ":Cube", "Cube", cube_attributes) res_string = template % { "cube_node": cube_node, @@ -314,9 +305,7 @@ def _coord_text(label, coord): attrs = [(name, getattr(coord, name)) for name in _dot_attrs] if coord.attributes: - custom_attrs = sorted( - coord.attributes.items(), key=lambda item: item[0] - ) + custom_attrs = sorted(coord.attributes.items(), key=lambda item: item[0]) attrs.extend(custom_attrs) node = _dot_node(_SUBGRAPH_INDENT, label, coord.__class__.__name__, attrs) diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index 16f71a940f..8b7135bce8 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -38,9 +38,7 @@ def _get_NAME_loader(filename): load = name_loaders.load_NAMEII_field if load is None: - raise ValueError( - "Unable to determine NAME file type " "of {!r}.".format(filename) - ) + raise ValueError("Unable to determine NAME file type of {!r}.".format(filename)) return load diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 7cc7c61d81..ef6057520a 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -26,9 +26,7 @@ NAMEII_TIMESERIES_DATETIME_FORMAT = "%d/%m/%Y %H:%M:%S" -NAMECoord = collections.namedtuple( - "NAMECoord", ["name", "dimension", "values"] -) +NAMECoord = collections.namedtuple("NAMECoord", ["name", "dimension", "values"]) def _split_name_and_units(name): @@ -138,17 +136,13 @@ def 
_build_lat_lon_for_NAME_field( step = header["X grid resolution"] count = header["X grid size"] pts = start + np.arange(count, dtype=np.float64) * step - lat_lon = NAMECoord( - name=coord_names[0], dimension=dimindex, values=pts - ) + lat_lon = NAMECoord(name=coord_names[0], dimension=dimindex, values=pts) else: start = header["Y grid origin"] step = header["Y grid resolution"] count = header["Y grid size"] pts = start + np.arange(count, dtype=np.float64) * step - lat_lon = NAMECoord( - name=coord_names[1], dimension=dimindex, values=pts - ) + lat_lon = NAMECoord(name=coord_names[1], dimension=dimindex, values=pts) return lat_lon @@ -187,12 +181,8 @@ def _build_lat_lon_for_NAME_timeseries(column_headings): new_headings.append(heading) column_headings[key] = new_headings - lon = NAMECoord( - name="longitude", dimension=None, values=column_headings["X"] - ) - lat = NAMECoord( - name="latitude", dimension=None, values=column_headings["Y"] - ) + lon = NAMECoord(name="longitude", dimension=None, values=column_headings["X"]) + lat = NAMECoord(name="latitude", dimension=None, values=column_headings["Y"]) return lat, lon @@ -204,9 +194,7 @@ def _calc_integration_period(time_avgs): """ integration_periods = [] - pattern = re.compile( - r"\s*(\d{1,2}day)?\s*(\d{1,2}hr)?\s*(\d{1,2}min)?\s*(\w*)\s*" - ) + pattern = re.compile(r"\s*(\d{1,2}day)?\s*(\d{1,2}hr)?\s*(\d{1,2}min)?\s*(\w*)\s*") for time_str in time_avgs: days = 0 hours = 0 @@ -272,9 +260,7 @@ def _parse_units(units): try: units = cf_units.Unit(units) except ValueError: - warnings.warn( - "Unknown units: {!r}".format(units), category=IrisLoadWarning - ) + warnings.warn("Unknown units: {!r}".format(units), category=IrisLoadWarning) units = cf_units.Unit(None) return units @@ -404,9 +390,7 @@ def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None): return coord -def _generate_cubes( - header, column_headings, coords, data_arrays, cell_methods=None -): +def _generate_cubes(header, column_headings, coords, 
data_arrays, cell_methods=None): """ Yield :class:`iris.cube.Cube` instances given the headers, column headings, coords and data_arrays extracted @@ -423,9 +407,7 @@ def _generate_cubes( cube = iris.cube.Cube(data_array) # Determine the name and units. - name = "{} {}".format( - field_headings["Species"], field_headings["Quantity"] - ) + name = "{} {}".format(field_headings["Species"], field_headings["Quantity"]) name = name.upper().replace(" ", "_") cube.rename(name) @@ -438,9 +420,7 @@ def _generate_cubes( # level, time etc.) if "Z" in field_headings: (upper_bound,) = [ - field_headings["... to [Z]"] - if "... to [Z]" in field_headings - else None + field_headings["... to [Z]"] if "... to [Z]" in field_headings else None ] (lower_bound,) = [ field_headings["... from [Z]"] @@ -511,10 +491,7 @@ def _generate_cubes( ) if coord.name == "height" or coord.name == "altitude": icoord.long_name = long_name - if ( - coord.name == "time" - and "Av or Int period" in field_headings - ): + if coord.name == "time" and "Av or Int period" in field_headings: dt = coord.values - field_headings["Av or Int period"] bnds = time_unit.date2num(np.vstack((dt, coord.values)).T) icoord.bounds = bnds.astype(float) @@ -528,10 +505,7 @@ def _generate_cubes( coord_system=coord_sys, units=coord_units, ) - if ( - coord.name == "time" - and "Av or Int period" in field_headings - ): + if coord.name == "time" and "Av or Int period" in field_headings: dt = coord.values - field_headings["Av or Int period"] bnds = time_unit.date2num(np.vstack((dt, coord.values)).T) icoord.bounds = bnds[i, :].astype(float) @@ -612,9 +586,7 @@ def _build_cell_methods(av_or_ints, coord): else: cell_method = None msg = "Unknown {} statistic: {!r}. Unable to create cell method." 
- warnings.warn( - msg.format(coord, av_or_int), category=IrisLoadWarning - ) + warnings.warn(msg.format(coord, av_or_int), category=IrisLoadWarning) cell_methods.append(cell_method) # NOTE: this can be a None return cell_methods @@ -698,17 +670,11 @@ def load_NAMEIII_field(filename): values=np.array(column_headings["Time"]), ) - cell_methods = _build_cell_methods( - column_headings["Time Av or Int"], tdim.name - ) + cell_methods = _build_cell_methods(column_headings["Time Av or Int"], tdim.name) # Build regular latitude and longitude coordinates. - lon = _build_lat_lon_for_NAME_field( - header, 1, "X", coord_names=coord_names - ) - lat = _build_lat_lon_for_NAME_field( - header, 0, "Y", coord_names=coord_names - ) + lon = _build_lat_lon_for_NAME_field(header, 1, "X", coord_names=coord_names) + lat = _build_lat_lon_for_NAME_field(header, 0, "Y", coord_names=coord_names) coords = [lon, lat, tdim] @@ -717,9 +683,7 @@ def load_NAMEIII_field(filename): shape = (header["Y grid size"], header["X grid size"]) data_arrays = _read_data_arrays(file_handle, n_arrays, shape) - return _generate_cubes( - header, column_headings, coords, data_arrays, cell_methods - ) + return _generate_cubes(header, column_headings, coords, data_arrays, cell_methods) def load_NAMEII_field(filename): @@ -782,9 +746,7 @@ def load_NAMEII_field(filename): if matches: if len(matches.group(1)) > 0: hours = float(matches.group(1)) - column_headings["Av or Int period"].append( - datetime.timedelta(hours=hours) - ) + column_headings["Av or Int period"].append(datetime.timedelta(hours=hours)) # Build a time coordinate. tdim = NAMECoord( @@ -793,9 +755,7 @@ def load_NAMEII_field(filename): values=np.array(column_headings["Time"]), ) - cell_methods = _build_cell_methods( - column_headings["Time Av or Int"], tdim.name - ) + cell_methods = _build_cell_methods(column_headings["Time Av or Int"], tdim.name) # Build regular latitude and longitude coordinates. 
lon = _build_lat_lon_for_NAME_field(header, 1, "X") @@ -811,9 +771,7 @@ def load_NAMEII_field(filename): shape = (header["Y grid size"], header["X grid size"]) data_arrays = _read_data_arrays(file_handle, n_arrays, shape) - return _generate_cubes( - header, column_headings, coords, data_arrays, cell_methods - ) + return _generate_cubes(header, column_headings, coords, data_arrays, cell_methods) def load_NAMEIII_timeseries(filename): @@ -960,9 +918,7 @@ def load_NAMEII_timeseries(filename): # Time is stored in the first two columns. t = vals[0].strip() + " " + vals[1].strip() - dt = datetime.datetime.strptime( - t, NAMEII_TIMESERIES_DATETIME_FORMAT - ) + dt = datetime.datetime.strptime(t, NAMEII_TIMESERIES_DATETIME_FORMAT) time_list.append(dt) # Populate the data arrays. @@ -1089,9 +1045,7 @@ def load_NAMEIII_version2(filename): # Make a list of data lists to hold the data # for each column.(aimed at T-Z data) data_lists = [[] for i in range(header["Number of field cols"])] - coord_lists = [ - [] for i in range(header["Number of preliminary cols"] - 1) - ] + coord_lists = [[] for i in range(header["Number of preliminary cols"] - 1)] # Iterate over the remaining lines which represent the data in a # column form. @@ -1214,9 +1168,7 @@ def load_NAMEIII_trajectory(filename): A generator :class:`iris.cube.Cube` instances. """ - time_unit = cf_units.Unit( - "hours since epoch", calendar=cf_units.CALENDAR_STANDARD - ) + time_unit = cf_units.Unit("hours since epoch", calendar=cf_units.CALENDAR_STANDARD) with open(filename, "r") as infile: header = read_header(infile) diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index cf550fbb57..1b01e71ca8 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -17,10 +17,7 @@ logger = iris.config.get_logger(__name__) # Note: these probably shouldn't be public, but for now they are. 
-from .._nc_load_rules.helpers import ( - UnknownCellMethodWarning, - parse_cell_methods, -) +from .._nc_load_rules.helpers import UnknownCellMethodWarning, parse_cell_methods from .loader import DEBUG, NetCDFDataProxy, load_cubes from .saver import ( CF_CONVENTIONS_VERSION, diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 5abffb896f..c3159781b5 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -48,9 +48,7 @@ class _ThreadSafeWrapper(ABC): @classmethod def is_contained_type(cls, instance): - return all( - hasattr(instance, attr) for attr in cls._DUCKTYPE_CHECK_PROPERTIES - ) + return all(hasattr(instance, attr) for attr in cls._DUCKTYPE_CHECK_PROPERTIES) @classmethod def from_existing(cls, instance): @@ -152,9 +150,7 @@ def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: - dimensions_ = list( - self._contained_instance.get_dims(*args, **kwargs) - ) + dimensions_ = list(self._contained_instance.get_dims(*args, **kwargs)) return tuple([DimensionWrapper.from_existing(d) for d in dimensions_]) @@ -184,10 +180,7 @@ def dimensions(self) -> typing.Dict[str, DimensionWrapper]: """ with _GLOBAL_NETCDF4_LOCK: dimensions_ = self._contained_instance.dimensions - return { - k: DimensionWrapper.from_existing(v) - for k, v in dimensions_.items() - } + return {k: DimensionWrapper.from_existing(v) for k, v in dimensions_.items()} def createDimension(self, *args, **kwargs) -> DimensionWrapper: """ @@ -200,9 +193,7 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper: within _GLOBAL_NETCDF4_LOCK. 
""" with _GLOBAL_NETCDF4_LOCK: - new_dimension = self._contained_instance.createDimension( - *args, **kwargs - ) + new_dimension = self._contained_instance.createDimension(*args, **kwargs) return DimensionWrapper.from_existing(new_dimension) # All Group API that returns Variable(s) is wrapped to instead return @@ -220,9 +211,7 @@ def variables(self) -> typing.Dict[str, VariableWrapper]: """ with _GLOBAL_NETCDF4_LOCK: variables_ = self._contained_instance.variables - return { - k: VariableWrapper.from_existing(v) for k, v in variables_.items() - } + return {k: VariableWrapper.from_existing(v) for k, v in variables_.items()} def createVariable(self, *args, **kwargs) -> VariableWrapper: """ @@ -235,9 +224,7 @@ def createVariable(self, *args, **kwargs) -> VariableWrapper: within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: - new_variable = self._contained_instance.createVariable( - *args, **kwargs - ) + new_variable = self._contained_instance.createVariable(*args, **kwargs) return VariableWrapper.from_existing(new_variable) def get_variables_by_attributes( @@ -255,9 +242,7 @@ def get_variables_by_attributes( """ with _GLOBAL_NETCDF4_LOCK: variables_ = list( - self._contained_instance.get_variables_by_attributes( - *args, **kwargs - ) + self._contained_instance.get_variables_by_attributes(*args, **kwargs) ) return [VariableWrapper.from_existing(v) for v in variables_] diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 1488c0afd3..31b1774f19 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -246,9 +246,7 @@ def _get_cf_var_data(cf_var, filename): if chunks == "contiguous": if ( CHUNK_CONTROL.mode is ChunkControl.Modes.FROM_FILE - and isinstance( - cf_var, iris.fileformats.cf.CFDataVariable - ) + and isinstance(cf_var, iris.fileformats.cf.CFDataVariable) ): raise KeyError( f"{cf_var.cf_name} does not contain pre-existing chunk specifications." 
@@ -416,8 +414,7 @@ def coord_from_term(term): if cf_var_name == name: return coord warnings.warn( - "Unable to find coordinate for variable " - "{!r}".format(name), + "Unable to find coordinate for variable {!r}".format(name), category=iris.exceptions.IrisFactoryCoordNotFoundWarning, ) @@ -455,9 +452,7 @@ def coord_from_term(term): if coord_p0.has_bounds(): msg = ( "Ignoring atmosphere hybrid sigma pressure " - "scalar coordinate {!r} bounds.".format( - coord_p0.name() - ) + "scalar coordinate {!r} bounds.".format(coord_p0.name()) ) warnings.warn( msg, @@ -484,9 +479,7 @@ def coord_from_term(term): depth_c = coord_from_term("depth_c") nsigma = coord_from_term("nsigma") zlev = coord_from_term("zlev") - factory = OceanSigmaZFactory( - sigma, eta, depth, depth_c, nsigma, zlev - ) + factory = OceanSigmaZFactory(sigma, eta, depth, depth_c, nsigma, zlev) elif formula_type == "ocean_sigma_coordinate": sigma = coord_from_term("sigma") eta = coord_from_term("eta") @@ -762,10 +755,7 @@ def set( raise ValueError(msg) dim_chunks = self.var_dim_chunksizes.setdefault(var_name, {}) for dim_name, chunksize in dimension_chunksizes.items(): - if not ( - isinstance(dim_name, str) - and isinstance(chunksize, int) - ): + if not (isinstance(dim_name, str) and isinstance(chunksize, int)): msg = ( "'dimension_chunksizes' kwargs should be a dict " f"of `str: int` pairs, not {dimension_chunksizes!r}." 
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 3c154b8511..e7e4ec615b 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -126,8 +126,7 @@ HybridPressureFactory: _FactoryDefn( primary="delta", std_name="atmosphere_hybrid_sigma_pressure_coordinate", - formula_terms_format="ap: {delta} b: {sigma} " - "ps: {surface_air_pressure}", + formula_terms_format="ap: {delta} b: {sigma} ps: {surface_air_pressure}", ), OceanSigmaZFactory: _FactoryDefn( primary="zlev", @@ -436,9 +435,7 @@ def __init__(self, filename, netcdf_format, compute=True): "NETCDF3_CLASSIC", "NETCDF3_64BIT", ]: - raise ValueError( - "Unknown netCDF file format, got %r" % netcdf_format - ) + raise ValueError("Unknown netCDF file format, got %r" % netcdf_format) # All persistent variables #: CF name mapping with iris coordinates @@ -455,9 +452,7 @@ def __init__(self, filename, netcdf_format, compute=True): #: A dictionary, mapping formula terms to owner cf variable name self._formula_terms_cache = {} #: Target filepath - self.filepath = ( - None # this line just for the API page -- value is set later - ) + self.filepath = None # this line just for the API page -- value is set later #: Whether to complete delayed saves on exit (and raise associated warnings). self.compute = compute # N.B. the file-write-lock *type* actually depends on the dask scheduler type. @@ -645,9 +640,10 @@ def write( if unlimited_dimensions is None: unlimited_dimensions = [] - cf_profile_available = iris.site_configuration.get( - "cf_profile" - ) not in [None, False] + cf_profile_available = iris.site_configuration.get("cf_profile") not in [ + None, + False, + ] if cf_profile_available: # Perform a CF profile of the cube. This may result in an exception # being raised if mandatory requirements are not satisfied. @@ -742,9 +738,7 @@ def write( # Perform a CF patch of the dataset. 
cf_patch(profile, self._dataset, cf_var_cube) else: - msg = "cf_profile is available but no {} defined.".format( - "cf_patch" - ) + msg = "cf_profile is available but no {} defined.".format("cf_patch") warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) @staticmethod @@ -806,9 +800,7 @@ def update_global_attributes(self, attributes=None, **kwargs): for attr_name in sorted(kwargs): _setncattr(self._dataset, attr_name, kwargs[attr_name]) - def _create_cf_dimensions( - self, cube, dimension_names, unlimited_dimensions=None - ): + def _create_cf_dimensions(self, cube, dimension_names, unlimited_dimensions=None): """Create the CF-netCDF data dimensions. Parameters @@ -914,17 +906,11 @@ def _add_mesh(self, cube_or_mesh): # Record the coordinates (if any) on the mesh variable. if coord_names: coord_names = " ".join(coord_names) - _setncattr( - cf_mesh_var, coords_file_attr, coord_names - ) + _setncattr(cf_mesh_var, coords_file_attr, coord_names) # Add all the connectivity variables. # pre-fetch the set + ignore "None"s, which are empty slots. - conns = [ - conn - for conn in mesh.all_connectivities - if conn is not None - ] + conns = [conn for conn in mesh.all_connectivities if conn is not None] for conn in conns: # Get the connectivity role, = "{loc1}_{loc2}_connectivity". cf_conn_attr_name = conn.cf_role @@ -1171,9 +1157,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): key: self._name_coord_map.name(coord) for key, coord in factory.dependencies.items() } - formula_terms = factory_defn.formula_terms_format.format( - **names - ) + formula_terms = factory_defn.formula_terms_format.format(**names) std_name = factory_defn.std_name if hasattr(cf_var, "formula_terms"): @@ -1188,10 +1172,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): # dimensionless vertical coordinate is required # with new formula_terms and a renamed dimension. 
if cf_name in dimension_names: - msg = ( - "Unable to create dimensonless vertical " - "coordinate." - ) + msg = "Unable to create dimensonless vertical coordinate." raise ValueError(msg) key = (cf_name, std_name, formula_terms) name = self._formula_terms_cache.get(key) @@ -1212,9 +1193,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): # Update the associated cube variable. coords = cf_var_cube.coordinates.split() coords = [name if c == cf_name else c for c in coords] - _setncattr( - cf_var_cube, "coordinates", " ".join(coords) - ) + _setncattr(cf_var_cube, "coordinates", " ".join(coords)) else: _setncattr(cf_var, "standard_name", std_name) _setncattr(cf_var, "axis", "Z") @@ -1243,9 +1222,7 @@ def _get_dim_names(self, cube_or_mesh): """ - def record_dimension( - names_list, dim_name, length, matching_coords=None - ): + def record_dimension(names_list, dim_name, length, matching_coords=None): """Record a file dimension, its length and associated "coordinates". Record a file dimension, its length and associated "coordinates" @@ -1327,9 +1304,7 @@ def record_dimension( if dim_name is not None: # For mesh-identifying coords, we require the *same* # coord, not an identical one (i.e. "is" not "==") - stored_coord = self._dim_names_and_coords.coord( - dim_name - ) + stored_coord = self._dim_names_and_coords.coord(dim_name) if dim_element is not stored_coord: # This is *not* a proper match after all. dim_name = None @@ -1340,9 +1315,7 @@ def record_dimension( (dim_length,) = dim_element.shape else: # extract source dim, respecting dim-ordering - dim_length = dim_element.shape[ - dim_element.location_axis - ] + dim_length = dim_element.shape[dim_element.location_axis] # Name it for the relevant mesh dimension location_dim_attr = f"{location}_dimension" dim_name = getattr(mesh, location_dim_attr) @@ -1405,9 +1378,7 @@ def record_dimension( if dim_name is None: # Not already present : create a unique dimension name # from the coord. 
- dim_name = self._get_coord_variable_name( - cube, coord - ) + dim_name = self._get_coord_variable_name(cube, coord) # Disambiguate if it has the same name as an # existing dimension. # OR if it matches an existing file variable name. @@ -1431,16 +1402,12 @@ def record_dimension( # block above. while ( dim_name in self._existing_dim - and ( - self._existing_dim[dim_name] != cube.shape[dim] - ) + and (self._existing_dim[dim_name] != cube.shape[dim]) ) or dim_name in self._dataset.variables: dim_name = self._increment_name(dim_name) # Record the dimension. - record_dimension( - cube_dimensions, dim_name, cube.shape[dim], dim_coords - ) + record_dimension(cube_dimensions, dim_name, cube.shape[dim], dim_coords) return mesh_dimensions, cube_dimensions @@ -1512,18 +1479,14 @@ def _ensure_valid_dtype(self, values, src_name, src_object): if is_lazy_data(values): val_min, val_max = _co_realise_lazy_arrays([val_min, val_max]) # Cast to an integer type supported by netCDF3. - can_cast = all( - [np.can_cast(m, np.int32) for m in (val_min, val_max)] - ) + can_cast = all([np.can_cast(m, np.int32) for m in (val_min, val_max)]) if not can_cast: msg = ( "The data type of {} {!r} is not supported by {} and" " its values cannot be safely cast to a supported" " integer type." ) - msg = msg.format( - src_name, src_object, self._dataset.file_format - ) + msg = msg.format(src_name, src_object, self._dataset.file_format) raise ValueError(msg) values = values.astype(np.int32) return values @@ -1569,9 +1532,7 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): while bounds_dimension_name in self._dataset.variables: # Also avoid collision with variable names. # See '_get_dim_names' for reason. 
- bounds_dimension_name = self._increment_name( - bounds_dimension_name - ) + bounds_dimension_name = self._increment_name(bounds_dimension_name) self._dataset.createDimension(bounds_dimension_name, n_bounds) boundsvar_name = "{}_{}".format(cf_name, varname_extra) @@ -1858,9 +1819,7 @@ def _create_generic_cf_array_var( while string_dimension_name in self._dataset.variables: # Also avoid collision with variable names. # See '_get_dim_names' for reason. - string_dimension_name = self._increment_name( - string_dimension_name - ) + string_dimension_name = self._increment_name(string_dimension_name) self._dataset.createDimension( string_dimension_name, string_dimension_depth ) @@ -2011,9 +1970,7 @@ def _create_cf_grid_mapping(self, cube, cf_var_cube): cf_var_grid = self._dataset.createVariable( cs.grid_mapping_name, np.int32 ) - _setncattr( - cf_var_grid, "grid_mapping_name", cs.grid_mapping_name - ) + _setncattr(cf_var_grid, "grid_mapping_name", cs.grid_mapping_name) def add_ellipsoid(ellipsoid): cf_var_grid.longitude_of_prime_meridian = ( @@ -2037,15 +1994,9 @@ def add_ellipsoid(ellipsoid): elif isinstance(cs, iris.coord_systems.RotatedGeogCS): if cs.ellipsoid: add_ellipsoid(cs.ellipsoid) - cf_var_grid.grid_north_pole_latitude = ( - cs.grid_north_pole_latitude - ) - cf_var_grid.grid_north_pole_longitude = ( - cs.grid_north_pole_longitude - ) - cf_var_grid.north_pole_grid_longitude = ( - cs.north_pole_grid_longitude - ) + cf_var_grid.grid_north_pole_latitude = cs.grid_north_pole_latitude + cf_var_grid.grid_north_pole_longitude = cs.grid_north_pole_longitude + cf_var_grid.north_pole_grid_longitude = cs.north_pole_grid_longitude # tmerc elif isinstance(cs, iris.coord_systems.TransverseMercator): @@ -2095,9 +2046,7 @@ def add_ellipsoid(ellipsoid): if cs.ellipsoid: add_ellipsoid(cs.ellipsoid) cf_var_grid.latitude_of_projection_origin = cs.central_lat - cf_var_grid.straight_vertical_longitude_from_pole = ( - cs.central_lon - ) + 
cf_var_grid.straight_vertical_longitude_from_pole = cs.central_lon cf_var_grid.false_easting = cs.false_easting cf_var_grid.false_northing = cs.false_northing # Only one of these should be set @@ -2140,9 +2089,7 @@ def add_ellipsoid(ellipsoid): ) # lambert azimuthal equal area - elif isinstance( - cs, iris.coord_systems.LambertAzimuthalEqualArea - ): + elif isinstance(cs, iris.coord_systems.LambertAzimuthalEqualArea): if cs.ellipsoid: add_ellipsoid(cs.ellipsoid) cf_var_grid.longitude_of_projection_origin = ( @@ -2180,9 +2127,7 @@ def add_ellipsoid(ellipsoid): ) cf_var_grid.false_easting = cs.false_easting cf_var_grid.false_northing = cs.false_northing - cf_var_grid.perspective_point_height = ( - cs.perspective_point_height - ) + cf_var_grid.perspective_point_height = cs.perspective_point_height # geostationary elif isinstance(cs, iris.coord_systems.Geostationary): @@ -2196,27 +2141,20 @@ def add_ellipsoid(ellipsoid): ) cf_var_grid.false_easting = cs.false_easting cf_var_grid.false_northing = cs.false_northing - cf_var_grid.perspective_point_height = ( - cs.perspective_point_height - ) + cf_var_grid.perspective_point_height = cs.perspective_point_height cf_var_grid.sweep_angle_axis = cs.sweep_angle_axis # oblique mercator (and rotated variant) # Use duck-typing over isinstance() - subclasses (i.e. # RotatedMercator) upset mock tests. - elif ( - getattr(cs, "grid_mapping_name", None) - == "oblique_mercator" - ): + elif getattr(cs, "grid_mapping_name", None) == "oblique_mercator": # RotatedMercator subclasses ObliqueMercator, and RM # instances are implicitly saved as OM due to inherited # properties. This is correct because CF 1.11 is removing # all mention of RM. 
if cs.ellipsoid: add_ellipsoid(cs.ellipsoid) - cf_var_grid.azimuth_of_central_line = ( - cs.azimuth_of_central_line - ) + cf_var_grid.azimuth_of_central_line = cs.azimuth_of_central_line cf_var_grid.latitude_of_projection_origin = ( cs.latitude_of_projection_origin ) @@ -2484,9 +2422,7 @@ def _lazy_stream_data(self, data, fill_value, fill_warn, cf_var): # does not have a compatible type. This causes a deprecation warning at # numpy 1.24, *and* was preventing correct fill-value checking of character # data, since they are actually bytes (dtype 'S1'). - fill_value_to_check = np.array( - fill_value_to_check, dtype=dtype - ) + fill_value_to_check = np.array(fill_value_to_check, dtype=dtype) else: # A None means we will NOT check for collisions. fill_value_to_check = None @@ -2510,9 +2446,7 @@ def store(data, cf_var, fill_info): self.filepath, cf_var, self.file_write_lock ) # Add to the list of delayed writes, used in delayed_completion(). - self._delayed_writes.append( - (data, write_wrapper, fill_info) - ) + self._delayed_writes.append((data, write_wrapper, fill_info)) # In this case, fill-value checking is done later. But return 2 dummy # values, to be consistent with the non-streamed "store" signature. is_masked, contains_value = False, False @@ -2523,18 +2457,14 @@ def store(data, cf_var, fill_info): # We also check it immediately for any fill-value problems. def store(data, cf_var, fill_info): cf_var[:] = data - return _data_fillvalue_check( - np, data, fill_info.check_value - ) + return _data_fillvalue_check(np, data, fill_info.check_value) # Store the data and check if it is masked and contains the fill value. is_masked, contains_fill_value = store(data, cf_var, fill_info) if not doing_delayed_save: # Issue a fill-value warning immediately, if appropriate. 
- _fillvalue_report( - fill_info, is_masked, contains_fill_value, warn=True - ) + _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=True) def delayed_completion(self) -> Delayed: """Perform file completion for delayed saves. @@ -2578,9 +2508,7 @@ def compute_and_return_warnings(store_op, fv_infos, fv_checks): results = [] # Pair each fill_check result (is_masked, contains_value) with its # fillinfo and construct a suitable Warning if needed. - for fillinfo, (is_masked, contains_value) in zip( - fv_infos, fv_checks - ): + for fillinfo, (is_masked, contains_value) in zip(fv_infos, fv_checks): fv_warning = _fillvalue_report( fill_info=fillinfo, is_masked=is_masked, @@ -2636,9 +2564,7 @@ def complete(self, issue_warnings=True) -> List[Warning]: if issue_warnings: # Issue any delayed warnings from the compute. for delayed_warning in result_warnings: - warnings.warn( - delayed_warning, category=iris.exceptions.IrisSaveWarning - ) + warnings.warn(delayed_warning, category=iris.exceptions.IrisSaveWarning) return result_warnings @@ -3029,20 +2955,17 @@ def is_valid_packspec(p): if iris.config.netcdf.conventions_override: # Set to the default if custom conventions are not available. - conventions = cube.attributes.get( - "Conventions", CF_CONVENTIONS_VERSION - ) + conventions = cube.attributes.get("Conventions", CF_CONVENTIONS_VERSION) else: conventions = CF_CONVENTIONS_VERSION # Perform a CF patch of the conventions attribute. 
- cf_profile_available = iris.site_configuration.get( - "cf_profile" - ) not in [None, False] + cf_profile_available = iris.site_configuration.get("cf_profile") not in [ + None, + False, + ] if cf_profile_available: - conventions_patch = iris.site_configuration.get( - "cf_patch_conventions" - ) + conventions_patch = iris.site_configuration.get("cf_patch_conventions") if conventions_patch is not None: conventions = conventions_patch(conventions) else: diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index f4033cfb1d..116fecaa35 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -264,9 +264,7 @@ def _read_data(self, infile): elif self.datum_len == 4: numpy_dtype = np.int32 else: - raise TranslationError( - "Undefined datum length " "%d" % self.datum_type - ) + raise TranslationError("Undefined datum length %d" % self.datum_type) # 2:byte elif self.datum_type == 2: numpy_dtype = np.byte @@ -328,9 +326,7 @@ def load_cubes(filenames, callback=None): # Were we given a callback? if callback is not None: - cube = iris.io.run_callback( - callback, cube, field, filename - ) + cube = iris.io.run_callback(callback, cube, field, filename) if cube is None: continue diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 7347135422..39a150beb0 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -42,9 +42,7 @@ class TranslationWarning(IrisNimrodTranslationWarning): def is_missing(field, value): """Return True if value matches an "is-missing" number.""" - return any( - np.isclose(value, [field.int_mdi, field.float32_mdi, NIMROD_DEFAULT]) - ) + return any(np.isclose(value, [field.int_mdi, field.float32_mdi, NIMROD_DEFAULT])) def name(cube, field, handle_metadata_errors): @@ -97,9 +95,7 @@ def remove_unprintable_chars(input_str): Remove unprintable characters from a string and return the result. 
""" - return "".join( - c if c in string.printable else " " for c in input_str - ).strip() + return "".join(c if c in string.printable else " " for c in input_str).strip() def units(cube, field): @@ -144,9 +140,9 @@ def units(cube, field): if "^" in unit_list[1]: # Split out magnitude unit_sublist = unit_list[1].split("^") - cube.data = cube.data.astype(np.float32) / float( - unit_sublist[0] - ) ** float(unit_sublist[1]) + cube.data = cube.data.astype(np.float32) / float(unit_sublist[0]) ** float( + unit_sublist[1] + ) else: cube.data = cube.data.astype(np.float32) / float(unit_list[1]) field_units = unit_list[0] @@ -187,9 +183,7 @@ def units(cube, field): except ValueError: # Just add it as an attribute. warnings.warn( - "Unhandled units '{0}' recorded in cube attributes.".format( - field_units - ), + "Unhandled units '{0}' recorded in cube attributes.".format(field_units), category=IrisNimrodTranslationWarning, ) cube.attributes["invalid_units"] = field_units @@ -215,10 +209,7 @@ def time(cube, field): period_seconds = None if field.period_minutes == 32767: period_seconds = field.period_seconds - elif ( - not is_missing(field, field.period_minutes) - and field.period_minutes != 0 - ): + elif not is_missing(field, field.period_minutes) and field.period_minutes != 0: period_seconds = field.period_minutes * 60 if period_seconds: bounds = np.array([point - period_seconds, point], dtype=np.int64) @@ -300,18 +291,14 @@ def mask_cube(cube, field): # field.data are floats masked_points = np.isclose(field.data, field.float32_mdi) if np.any(masked_points): - cube.data = np.ma.masked_array( - cube.data, mask=masked_points, dtype=dtype - ) + cube.data = np.ma.masked_array(cube.data, mask=masked_points, dtype=dtype) def experiment(cube, field): """Add an 'experiment number' to the cube, if present in the field.""" if not is_missing(field, field.experiment_num): cube.add_aux_coord( - DimCoord( - field.experiment_num, long_name="experiment_number", units="1" - ) + 
DimCoord(field.experiment_num, long_name="experiment_number", units="1") ) @@ -342,15 +329,10 @@ def proj_biaxial_ellipsoid(field, handle_metadata_errors): ellipsoid = airy_1830 elif field.proj_biaxial_ellipsoid == 1: ellipsoid = international_1924 - elif ( - is_missing(field, field.proj_biaxial_ellipsoid) - and handle_metadata_errors - ): + elif is_missing(field, field.proj_biaxial_ellipsoid) and handle_metadata_errors: if field.horizontal_grid_type == 0: ellipsoid = airy_1830 - elif ( - field.horizontal_grid_type == 1 or field.horizontal_grid_type == 4 - ): + elif field.horizontal_grid_type == 1 or field.horizontal_grid_type == 4: ellipsoid = international_1924 else: raise TranslationError( @@ -571,9 +553,7 @@ def vertical_coord(cube, field): return # A bounded vertical coord starting from the surface coord_point = 0.0 - coord_args = vertical_codes.get( - field.reference_vertical_coord_type, None - ) + coord_args = vertical_codes.get(field.reference_vertical_coord_type, None) coord_point = np.array(coord_point, dtype=np.float32) if ( field.reference_vertical_coord >= 0.0 @@ -586,16 +566,13 @@ def vertical_coord(cube, field): bounds = None if coord_args: - new_coord = iris.coords.AuxCoord( - coord_point, bounds=bounds, **coord_args - ) + new_coord = iris.coords.AuxCoord(coord_point, bounds=bounds, **coord_args) # Add coordinate to cube cube.add_aux_coord(new_coord) return warnings.warn( - "Vertical coord {!r} not yet handled" - "".format(field.vertical_coord_type), + "Vertical coord {!r} not yet handled".format(field.vertical_coord_type), category=TranslationWarning, ) @@ -669,9 +646,7 @@ def add_attr(item): # Remove member number from cube_source. This commonly takes the form ek04 where ek # indicates the model and 04 is the realization number. As the number is represented # by a realization coord, stripping it from here allows cubes to be merged. 
- match = re.match( - r"^(?P\w\w)(?P\d\d)$", cube_source - ) + match = re.match(r"^(?P\w\w)(?P\d\d)$", cube_source) try: r_coord = cube.coord("realization") except CoordinateNotFoundError: @@ -788,9 +763,7 @@ def probability_coord(cube, field, handle_metadata_errors): if handle_metadata_errors: coord_keys.update(known_threshold_coord(field)) if not coord_keys.get("units"): - coord_keys["units"] = units_from_field_code.get( - field.field_code, "unknown" - ) + coord_keys["units"] = units_from_field_code.get(field.field_code, "unknown") coord_val = None # coord_val could come from the threshold_value or threshold_value_alt: if field.threshold_value_alt > -32766.0: @@ -808,9 +781,7 @@ def probability_coord(cube, field, handle_metadata_errors): ): try: coord_val = [ - int(x.strip("pc")) - for x in cube.name().split(" ") - if x.find("pc") > 0 + int(x.strip("pc")) for x in cube.name().split(" ") if x.find("pc") > 0 ][0] except IndexError: pass @@ -890,9 +861,7 @@ def soil_type_coord(cube, field): soil_name = soil_type_codes.get(field.soil_type, None) if soil_name: cube.add_aux_coord( - iris.coords.AuxCoord( - soil_name, standard_name="soil_type", units=None - ) + iris.coords.AuxCoord(soil_name, standard_name="soil_type", units=None) ) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 4b2b7eeae0..ec87870b1b 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -30,9 +30,7 @@ # NOTE: this is for backwards-compatitibility *ONLY* # We could simply remove it for v2.0 ? 
-from iris.fileformats._pp_lbproc_pairs import ( # noqa: F401 - LBPROC_MAP as lbproc_map, -) +from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP as lbproc_map # noqa: F401 from iris.fileformats._pp_lbproc_pairs import LBPROC_PAIRS # noqa: F401 import iris.fileformats.pp_load_rules from iris.fileformats.pp_save_rules import verify @@ -307,8 +305,7 @@ def from_msi(msi): if msi_match is None: raise ValueError( - 'Expected STASH code MSI string "mXXsXXiXXX", ' - "got %r" % (msi,) + 'Expected STASH code MSI string "mXXsXXiXXX", ' "got %r" % (msi,) ) return STASH(*msi_match.groups()) @@ -406,8 +403,7 @@ def __init__(self, value, name_mapping_dict=None): """ if value < 0: raise ValueError( - "Negative numbers not supported with splittable" - " integers object" + "Negative numbers not supported with splittable integers object" ) # define the name lookup first (as this is the way __setattr__ is @@ -432,9 +428,7 @@ def _calculate_str_value_from_value(self): object.__setattr__(self, name, self[index]) def _calculate_value_from_str_value(self): - self._value = np.sum( - [10**i * val for i, val in enumerate(self._strvalue)] - ) + self._value = np.sum([10**i * val for i, val in enumerate(self._strvalue)]) def __len__(self): return len(self._strvalue) @@ -458,9 +452,7 @@ def __setitem__(self, key, value): # the entire object appropriately. if not isinstance(value, int) or value < 0: - raise ValueError( - "Can only set %s as a positive integer value." % key - ) + raise ValueError("Can only set %s as a positive integer value." % key) if isinstance(key, slice): if ( @@ -481,15 +473,12 @@ def __setitem__(self, key, value): indices = range(*key.indices(100)) if len(indices) < len(str(value)): raise ValueError( - "Cannot put %s into %s as it has too many" - " digits." % (value, key) + "Cannot put %s into %s as it has too many digits." 
% (value, key) ) # Iterate over each of the indices in the slice, # zipping them together with the associated digit - for index, digit in zip( - indices, str(value).zfill(current_length)[::-1] - ): + for index, digit in zip(indices, str(value).zfill(current_length)[::-1]): # assign each digit to the associated index self.__setitem__(index, int(digit)) @@ -748,13 +737,11 @@ def _data_bytes_to_shaped_array( current_posn : current_posn + boundary_width * mid_height ] current_posn += len(east) - data[ - boundary_height:-boundary_height, -boundary_width: - ] = east.reshape(*e_w_shape) + data[boundary_height:-boundary_height, -boundary_width:] = east.reshape( + *e_w_shape + ) - south = compressed_data[ - current_posn : current_posn + boundary_height * x_width - ] + south = compressed_data[current_posn : current_posn + boundary_height * x_width] current_posn += len(south) data[:boundary_height, :] = south.reshape(*n_s_shape) @@ -965,9 +952,7 @@ def t2(self): def __repr__(self): """Return a string representation of the PP field.""" # Define an ordering on the basic header names - attribute_priority_lookup = { - name: loc[0] for name, loc in self.HEADER_DEFN - } + attribute_priority_lookup = {name: loc[0] for name, loc in self.HEADER_DEFN} # With the attributes sorted the order will remain stable if extra # attributes are added. 
@@ -975,8 +960,7 @@ def __repr__(self): EXTRA_DATA.values() ) self_attrs = [ - (name, getattr(self, name, None)) - for name in public_attribute_names + (name, getattr(self, name, None)) for name in public_attribute_names ] self_attrs = [pair for pair in self_attrs if pair[1] is not None] @@ -993,9 +977,7 @@ def __repr__(self): ) return ( - "PP Field" - + "".join(["\n %s: %s" % (k, v) for k, v in attributes]) - + "\n" + "PP Field" + "".join(["\n %s: %s" % (k, v) for k, v in attributes]) + "\n" ) @property @@ -1037,9 +1019,7 @@ def lbtim(self): def lbtim(self, value): value = int(value) self.raw_lbtim = value - self._lbtim = SplittableInt( - value, {"ia": slice(2, None), "ib": 1, "ic": 0} - ) + self._lbtim = SplittableInt(value, {"ia": slice(2, None), "ib": 1, "ic": 0}) # lbcode @property @@ -1050,9 +1030,7 @@ def lbcode(self): def lbcode(self, new_value): if not isinstance(new_value, SplittableInt): # add the ix/iy values for lbcode - new_value = SplittableInt( - new_value, {"iy": slice(0, 2), "ix": slice(2, 4)} - ) + new_value = SplittableInt(new_value, {"iy": slice(0, 2), "ix": slice(2, 4)}) self._lbcode = new_value # lbpack @@ -1112,17 +1090,13 @@ def calendar(self): calendar = cf_units.CALENDAR_365_DAY return calendar - def _read_extra_data( - self, pp_file, file_reader, extra_len, little_ended=False - ): + def _read_extra_data(self, pp_file, file_reader, extra_len, little_ended=False): """Read the extra data section and update the self appropriately.""" dtype_endian_char = "<" if little_ended else ">" # While there is still extra data to decode run this loop while extra_len > 0: dtype = "%cL" % dtype_endian_char - extra_int_code = struct.unpack_from( - dtype, file_reader(PP_WORD_DEPTH) - )[0] + extra_int_code = struct.unpack_from(dtype, file_reader(PP_WORD_DEPTH))[0] extra_len -= PP_WORD_DEPTH ib = extra_int_code % 1000 @@ -1209,12 +1183,8 @@ def save(self, file_handle): data.dtype = data.dtype.newbyteorder(">") # Create the arrays which will hold the header 
information - lb = np.empty( - shape=NUM_LONG_HEADERS, dtype=np.dtype(">u%d" % PP_WORD_DEPTH) - ) - b = np.empty( - shape=NUM_FLOAT_HEADERS, dtype=np.dtype(">f%d" % PP_WORD_DEPTH) - ) + lb = np.empty(shape=NUM_LONG_HEADERS, dtype=np.dtype(">u%d" % PP_WORD_DEPTH)) + b = np.empty(shape=NUM_FLOAT_HEADERS, dtype=np.dtype(">f%d" % PP_WORD_DEPTH)) # Fill in the header elements from the PPField for name, pos in self.HEADER_DEFN: @@ -1228,9 +1198,7 @@ def save(self, file_handle): header_elem = int(header_elem) lb[index] = header_elem else: - index = slice( - pos[0] - NUM_LONG_HEADERS, pos[-1] - NUM_LONG_HEADERS + 1 - ) + index = slice(pos[0] - NUM_LONG_HEADERS, pos[-1] - NUM_LONG_HEADERS + 1) b[index] = header_elem # Although all of the elements are now populated, we still need to @@ -1309,9 +1277,7 @@ def save(self, file_handle): raise NotImplementedError(msg) # populate lbrec in WORDS - lb[self.HEADER_DICT["lblrec"][0]] = ( - len_of_data_payload // PP_WORD_DEPTH - ) + lb[self.HEADER_DICT["lblrec"][0]] = len_of_data_payload // PP_WORD_DEPTH # populate lbuser[0] to have the data's datatype if data.dtype == np.dtype(">f4"): @@ -1394,9 +1360,7 @@ def save(self, file_handle): # def time_unit(self, time_unit, epoch="epoch"): - return cf_units.Unit( - "%s since %s" % (time_unit, epoch), calendar=self.calendar - ) + return cf_units.Unit("%s since %s" % (time_unit, epoch), calendar=self.calendar) def coord_system(self): """Return a CoordSystem for this PPField. 
@@ -1518,9 +1482,7 @@ def t1(self): """ if not hasattr(self, "_t1"): has_year_zero = self.lbyr == 0 or None - calendar = ( - None if self.lbmon == 0 or self.lbdat == 0 else self.calendar - ) + calendar = None if self.lbmon == 0 or self.lbdat == 0 else self.calendar self._t1 = cftime.datetime( self.lbyr, self.lbmon, @@ -1552,9 +1514,7 @@ def t2(self): """ if not hasattr(self, "_t2"): has_year_zero = self.lbyrd == 0 or None - calendar = ( - None if self.lbmond == 0 or self.lbdatd == 0 else self.calendar - ) + calendar = None if self.lbmond == 0 or self.lbdatd == 0 else self.calendar self._t2 = cftime.datetime( self.lbyrd, self.lbmond, @@ -1599,9 +1559,7 @@ def t1(self): """ if not hasattr(self, "_t1"): has_year_zero = self.lbyr == 0 or None - calendar = ( - None if self.lbmon == 0 or self.lbdat == 0 else self.calendar - ) + calendar = None if self.lbmon == 0 or self.lbdat == 0 else self.calendar self._t1 = cftime.datetime( self.lbyr, self.lbmon, @@ -1634,9 +1592,7 @@ def t2(self): """ if not hasattr(self, "_t2"): has_year_zero = self.lbyrd == 0 or None - calendar = ( - None if self.lbmond == 0 or self.lbdatd == 0 else self.calendar - ) + calendar = None if self.lbmond == 0 or self.lbdatd == 0 else self.calendar self._t2 = cftime.datetime( self.lbyrd, self.lbmond, @@ -1701,9 +1657,7 @@ def load(filename, read_data=False, little_ended=False): """ return _interpret_fields( - _field_gen( - filename, read_data_bytes=read_data, little_ended=little_ended - ) + _field_gen(filename, read_data_bytes=read_data, little_ended=little_ended) ) @@ -1772,9 +1726,7 @@ def _interpret_fields(fields): for field in landmask_compressed_fields: field.lbrow, field.lbnpt = mask_shape - _create_field_data( - field, mask_shape, land_mask_field=land_mask_field - ) + _create_field_data(field, mask_shape, land_mask_field=land_mask_field) yield field @@ -1911,17 +1863,13 @@ def _field_gen(filename, read_data_bytes, little_ended=False): pp_file_seek(PP_WORD_DEPTH, os.SEEK_CUR) # Get the LONG header 
entries dtype = "%ci%d" % (dtype_endian_char, PP_WORD_DEPTH) - header_longs = np.fromfile( - pp_file, dtype=dtype, count=NUM_LONG_HEADERS - ) + header_longs = np.fromfile(pp_file, dtype=dtype, count=NUM_LONG_HEADERS) # Nothing returned => EOF if len(header_longs) == 0: break # Get the FLOAT header entries dtype = "%cf%d" % (dtype_endian_char, PP_WORD_DEPTH) - header_floats = np.fromfile( - pp_file, dtype=dtype, count=NUM_FLOAT_HEADERS - ) + header_floats = np.fromfile(pp_file, dtype=dtype, count=NUM_FLOAT_HEADERS) header = tuple(header_longs) + tuple(header_floats) # Make a PPField of the appropriate sub-class (depends on header @@ -1973,10 +1921,7 @@ def _field_gen(filename, read_data_bytes, little_ended=False): # Change data dtype for a little-ended file. dtype = str(dtype) if dtype[0] != ">": - msg = ( - "Unexpected dtype {!r} can't be converted to " - "little-endian" - ) + msg = "Unexpected dtype {!r} can't be converted to little-endian" raise ValueError(msg) dtype = np.dtype("<" + dtype[1:]) @@ -2184,9 +2129,7 @@ def load_pairs_from_fields(pp_fields): """ load_pairs_from_fields = iris.fileformats.rules.load_pairs_from_fields - return load_pairs_from_fields( - pp_fields, iris.fileformats.pp_load_rules.convert - ) + return load_pairs_from_fields(pp_fields, iris.fileformats.pp_load_rules.convert) def _load_cubes_variable_loader( @@ -2223,9 +2166,7 @@ def _load_cubes_variable_loader( iris.fileformats.pp_load_rules.convert, ) - result = iris.fileformats.rules.load_cubes( - filenames, callback, loader, pp_filter - ) + result = iris.fileformats.rules.load_cubes(filenames, callback, loader, pp_filter) if um_fast_load.STRUCTURED_LOAD_CONTROLS.loads_use_structured: # We need an additional concatenate-like operation to combine cubes @@ -2362,9 +2303,7 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): for name, positions in pp_field.HEADER_DEFN: # Establish whether field name is integer or real default = ( - 0 - if positions[0] <= NUM_LONG_HEADERS - 
UM_TO_PP_HEADER_OFFSET - else 0.0 + 0 if positions[0] <= NUM_LONG_HEADERS - UM_TO_PP_HEADER_OFFSET else 0.0 ) # Establish whether field position is scalar or composite if len(positions) > 1: diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 1aed25311d..fcc54951b6 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -215,8 +215,7 @@ def _convert_vertical_coords( # Pressure. if (lbvc == 8) and ( - len(lbcode) != 5 - or (len(lbcode) == 5 and 1 not in [lbcode.ix, lbcode.iy]) + len(lbcode) != 5 or (len(lbcode) == 5 and 1 not in [lbcode.ix, lbcode.iy]) ): coord = _dim_or_aux(blev, long_name="pressure", units="hPa") coords_and_dims.append((coord, dim)) @@ -334,16 +333,13 @@ def _reshape_vector_args(values_and_dims): value = np.asarray(value) if len(dims) != value.ndim: raise ValueError( - "Lengths of dimension-mappings must match " - "input array dimensions." + "Lengths of dimension-mappings must match input array dimensions." ) # Save dim sizes in original order. original_shape = value.shape if dims: # Transpose values to put its dims in the target order. - dims_order = sorted( - range(len(dims)), key=lambda i_dim: dims[i_dim] - ) + dims_order = sorted(range(len(dims)), key=lambda i_dim: dims[i_dim]) value = value.transpose(dims_order) if max_dim != -1: # Reshape to add any extra *1 dims. 
@@ -437,9 +433,7 @@ def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): orig_points_dtype = np.asarray(points).dtype bounds = None if lower_and_upper_bounds is not None: - lower_bounds, upper_bounds = np.broadcast_arrays( - *lower_and_upper_bounds - ) + lower_bounds, upper_bounds = np.broadcast_arrays(*lower_and_upper_bounds) orig_bounds_dtype = lower_bounds.dtype bounds = np.vstack((lower_bounds, upper_bounds)).T @@ -449,9 +443,7 @@ def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): points, bounds = _collapse_degenerate_points_and_bounds(points, bounds) - used_dims = tuple( - i_dim for i_dim in range(points.ndim) if points.shape[i_dim] > 1 - ) + used_dims = tuple(i_dim for i_dim in range(points.ndim) if points.shape[i_dim] > 1) reshape_inds = tuple([points.shape[dim] for dim in used_dims]) points = points.reshape(reshape_inds) points = points.astype(orig_points_dtype) @@ -702,9 +694,7 @@ def date2year(t_in): ) ): coords_and_dims.append( - _new_coord_and_dims( - do_vector, "time", epoch_hours_unit, t1_epoch_hours - ) + _new_coord_and_dims(do_vector, "time", epoch_hours_unit, t1_epoch_hours) ) if ( @@ -726,9 +716,7 @@ def date2year(t_in): ) ) coords_and_dims.append( - _new_coord_and_dims( - do_vector, "time", epoch_hours_unit, t1_epoch_hours - ) + _new_coord_and_dims(do_vector, "time", epoch_hours_unit, t1_epoch_hours) ) coords_and_dims.append( _new_coord_and_dims( @@ -823,12 +811,7 @@ def date2year(t_in): ) ) - if ( - (len(lbcode) == 5) - and (lbcode[-1] == 3) - and (lbtim.ib == 2) - and (lbtim.ic == 2) - ): + if (len(lbcode) == 5) and (lbcode[-1] == 3) and (lbtim.ib == 2) and (lbtim.ic == 2): coords_and_dims.append( _new_coord_and_dims( do_vector, @@ -944,14 +927,10 @@ def convert(f): factories.extend(vertical_factories) # Realization (aka ensemble) (--> scalar coordinates) - aux_coords_and_dims.extend( - _convert_scalar_realization_coords(lbrsvd4=f.lbrsvd[3]) - ) + 
aux_coords_and_dims.extend(_convert_scalar_realization_coords(lbrsvd4=f.lbrsvd[3])) # Pseudo-level coordinate (--> scalar coordinates) - aux_coords_and_dims.extend( - _convert_scalar_pseudo_level_coords(lbuser5=f.lbuser[4]) - ) + aux_coords_and_dims.extend(_convert_scalar_pseudo_level_coords(lbuser5=f.lbuser[4])) # All the other rules. ( @@ -1135,12 +1114,7 @@ def _all_other_rules(f): ) # "Normal" (i.e. not cross-sectional) lats+lons (--> vector coordinates) - if ( - f.bdx != 0.0 - and f.bdx != f.bmdi - and len(f.lbcode) != 5 - and f.lbcode[0] == 1 - ): + if f.bdx != 0.0 and f.bdx != f.bmdi and len(f.lbcode) != 5 and f.lbcode[0] == 1: dim_coords_and_dims.append( ( DimCoord.from_regular( @@ -1156,12 +1130,7 @@ def _all_other_rules(f): ) ) - if ( - f.bdx != 0.0 - and f.bdx != f.bmdi - and len(f.lbcode) != 5 - and f.lbcode[0] == 2 - ): + if f.bdx != 0.0 and f.bdx != f.bmdi and len(f.lbcode) != 5 and f.lbcode[0] == 2: dim_coords_and_dims.append( ( DimCoord.from_regular( @@ -1178,12 +1147,7 @@ def _all_other_rules(f): ) ) - if ( - f.bdy != 0.0 - and f.bdy != f.bmdi - and len(f.lbcode) != 5 - and f.lbcode[0] == 1 - ): + if f.bdy != 0.0 and f.bdy != f.bmdi and len(f.lbcode) != 5 and f.lbcode[0] == 1: dim_coords_and_dims.append( ( DimCoord.from_regular( @@ -1198,12 +1162,7 @@ def _all_other_rules(f): ) ) - if ( - f.bdy != 0.0 - and f.bdy != f.bmdi - and len(f.lbcode) != 5 - and f.lbcode[0] == 2 - ): + if f.bdy != 0.0 and f.bdy != f.bmdi and len(f.lbcode) != 5 and f.lbcode[0] == 2: dim_coords_and_dims.append( ( DimCoord.from_regular( @@ -1253,11 +1212,7 @@ def _all_other_rules(f): ) # Cross-sectional vertical level types (--> vector coordinates) - if ( - len(f.lbcode) == 5 - and f.lbcode.iy == 2 - and (f.bdy == 0 or f.bdy == f.bmdi) - ): + if len(f.lbcode) == 5 and f.lbcode.iy == 2 and (f.bdy == 0 or f.bdy == f.bmdi): dim_coords_and_dims.append( ( DimCoord( @@ -1285,12 +1240,7 @@ def _all_other_rules(f): ) ) - if ( - len(f.lbcode) == 5 - and f.lbcode.ix == 10 - and f.bdx != 
0 - and f.bdx != f.bmdi - ): + if len(f.lbcode) == 5 and f.lbcode.ix == 10 and f.bdx != 0 and f.bdx != f.bmdi: dim_coords_and_dims.append( ( DimCoord.from_regular( @@ -1305,30 +1255,18 @@ def _all_other_rules(f): ) ) - if ( - len(f.lbcode) == 5 - and f.lbcode.iy == 1 - and (f.bdy == 0 or f.bdy == f.bmdi) - ): + if len(f.lbcode) == 5 and f.lbcode.iy == 1 and (f.bdy == 0 or f.bdy == f.bmdi): dim_coords_and_dims.append( ( - DimCoord( - f.y, long_name="pressure", units="hPa", bounds=f.y_bounds - ), + DimCoord(f.y, long_name="pressure", units="hPa", bounds=f.y_bounds), 0, ) ) - if ( - len(f.lbcode) == 5 - and f.lbcode.ix == 1 - and (f.bdx == 0 or f.bdx == f.bmdi) - ): + if len(f.lbcode) == 5 and f.lbcode.ix == 1 and (f.bdx == 0 or f.bdx == f.bmdi): dim_coords_and_dims.append( ( - DimCoord( - f.x, long_name="pressure", units="hPa", bounds=f.x_bounds - ), + DimCoord(f.x, long_name="pressure", units="hPa", bounds=f.x_bounds), 1, ) ) @@ -1383,9 +1321,7 @@ def _all_other_rules(f): dim_coords_and_dims.append( ( DimCoord( - np.linspace( - t1_epoch_days, t2_epoch_days, f.lbrow, endpoint=False - ), + np.linspace(t1_epoch_days, t2_epoch_days, f.lbrow, endpoint=False), standard_name="time", units=epoch_days_unit, bounds=f.y_bounds, @@ -1395,12 +1331,7 @@ def _all_other_rules(f): ) # Site number (--> scalar coordinate) - if ( - len(f.lbcode) == 5 - and f.lbcode[-1] == 1 - and f.lbcode.ix == 13 - and f.bdx != 0 - ): + if len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.ix == 13 and f.bdx != 0: dim_coords_and_dims.append( ( DimCoord.from_regular( @@ -1518,9 +1449,7 @@ def _all_other_rules(f): coord.guess_bounds() unhandled_lbproc = False elif f.lbcode == 101: - cell_methods.append( - CellMethod(zone_method, coords="grid_longitude") - ) + cell_methods.append(CellMethod(zone_method, coords="grid_longitude")) for coord, _dim in dim_coords_and_dims: if coord.standard_name == "grid_longitude": if len(coord.points) == 1: @@ -1550,11 +1479,7 @@ def _all_other_rules(f): um_minor = 
(f.lbsrce // 10000) % 100 attributes["um_version"] = "{:d}.{:d}".format(um_major, um_minor) - if ( - f.lbuser[6] != 0 - or (f.lbuser[3] // 1000) != 0 - or (f.lbuser[3] % 1000) != 0 - ): + if f.lbuser[6] != 0 or (f.lbuser[3] // 1000) != 0 or (f.lbuser[3] % 1000) != 0: attributes["STASH"] = f.stash if str(f.stash) in STASH_TO_CF: diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 9effba3c0a..0d26061ac7 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -35,10 +35,7 @@ def _basic_coord_system_rules(cube, pp): The PP field with updated metadata. """ - if ( - cube.coord_system("GeogCS") is not None - or cube.coord_system(None) is None - ): + if cube.coord_system("GeogCS") is not None or cube.coord_system(None) is None: pp.bplat = 90 pp.bplon = 0 elif cube.coord_system("RotatedGeogCS") is not None: @@ -126,17 +123,11 @@ def _general_time_rules(cube, pp): pp.t2 = cftime.datetime(0, 0, 0, calendar=None, has_year_zero=True) # Forecast. - if ( - time_coord is not None - and not time_coord.has_bounds() - and fp_coord is not None - ): + if time_coord is not None and not time_coord.has_bounds() and fp_coord is not None: pp.lbtim.ia = 0 pp.lbtim.ib = 1 pp.t1 = time_coord.units.num2date(time_coord.points[0]) - pp.t2 = time_coord.units.num2date( - time_coord.points[0] - fp_coord.points[0] - ) + pp.t2 = time_coord.units.num2date(time_coord.points[0] - fp_coord.points[0]) pp.lbft = fp_coord.points[0] # Time mean (non-climatological). 
@@ -166,12 +157,8 @@ def _general_time_rules(cube, pp): pp.lbtim.ib = 2 pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0]) pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1]) - stop = time_coord.units.convert( - time_coord.bounds[0, 1], "hours since epoch" - ) - start = frt_coord.units.convert( - frt_coord.points[0], "hours since epoch" - ) + stop = time_coord.units.convert(time_coord.bounds[0, 1], "hours since epoch") + start = frt_coord.units.convert(frt_coord.points[0], "hours since epoch") pp.lbft = stop - start if ( @@ -238,12 +225,8 @@ def _general_time_rules(cube, pp): pp.lbtim.ia = int(cm_time_max.intervals[0][:-5]) if time_coord is not None and time_coord.has_bounds(): - lower_bound_yr = time_coord.units.num2date( - time_coord.bounds[0, 0] - ).year - upper_bound_yr = time_coord.units.num2date( - time_coord.bounds[0, 1] - ).year + lower_bound_yr = time_coord.units.num2date(time_coord.bounds[0, 0]).year + upper_bound_yr = time_coord.units.num2date(time_coord.bounds[0, 1]).year else: lower_bound_yr = None upper_bound_yr = None @@ -631,10 +614,7 @@ def _lbproc_rules(cube, pp): if cube.attributes.get("ukmo__process_flags", None): pp.lbproc += sum( - [ - LBPROC_MAP[name] - for name in cube.attributes["ukmo__process_flags"] - ] + [LBPROC_MAP[name] for name in cube.attributes["ukmo__process_flags"]] ) # Zonal-mean: look for a CellMethod which is a "mean" over "longitude" or @@ -863,10 +843,7 @@ def _all_other_rules(cube, pp): pp.lbfc = CF_TO_LBFC[check_items] # Set field code. - if ( - "STASH" in cube.attributes - and str(cube.attributes["STASH"]) in STASH_TRANS - ): + if "STASH" in cube.attributes and str(cube.attributes["STASH"]) in STASH_TRANS: pp.lbfc = STASH_TRANS[str(cube.attributes["STASH"])].field_code # Set ensemble member number. 
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index bcfd4f8323..9326901da2 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -18,9 +18,7 @@ import iris.fileformats.um_cf_map Factory = collections.namedtuple("Factory", ["factory_class", "args"]) -ReferenceTarget = collections.namedtuple( - "ReferenceTarget", ("name", "transform") -) +ReferenceTarget = collections.namedtuple("ReferenceTarget", ("name", "transform")) class ConcreteReferenceTarget: @@ -166,20 +164,17 @@ def _dereference_args(factory, reference_targets, regrid_cache, cube): attributes=src.attributes, ) dims = [ - cube.coord_dims(src_coord)[0] - for src_coord in src.dim_coords + cube.coord_dims(src_coord)[0] for src_coord in src.dim_coords ] cube.add_aux_coord(new_coord, dims) args.append(new_coord) else: raise _ReferenceError( - "Unable to regrid reference for" - " {!r}".format(arg.name) + "Unable to regrid reference for {!r}".format(arg.name) ) else: raise _ReferenceError( - "The source data contains no " - "field(s) for {!r}.".format(arg.name) + "The source data contains no field(s) for {!r}.".format(arg.name) ) else: # If it wasn't a Reference, then arg is a dictionary @@ -228,9 +223,7 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): # So we can use `iris.analysis.interpolate.linear()` later, # ensure each target coord is either a scalar or maps to a # single, distinct dimension. - target_dims = [ - target_cube.coord_dims(coord) for coord in target_coords - ] + target_dims = [target_cube.coord_dims(coord) for coord in target_coords] target_dims = list(filter(None, target_dims)) unique_dims = set() for dims in target_dims: @@ -251,9 +244,7 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): result_cube = cubes[i] except ValueError: # Not already cached, so do the hard work of interpolating. 
- result_cube = _regrid_to_target( - src_cube, target_coords, target_cube - ) + result_cube = _regrid_to_target(src_cube, target_coords, target_cube) # Add it to the cache. grids.append(target_coords) cubes.append(result_cube) @@ -282,9 +273,7 @@ def __new__(cls, field_generator, field_generator_kwargs, converter): A callable that converts a field object into a Cube. """ - return tuple.__new__( - cls, (field_generator, field_generator_kwargs, converter) - ) + return tuple.__new__(cls, (field_generator, field_generator_kwargs, converter)) ConversionMetadata = collections.namedtuple( @@ -374,9 +363,7 @@ def _load_pairs_from_fields_and_filenames( # Post modify the new cube with a user-callback. # This is an ordinary Iris load callback, so it takes the filename. - cube = iris.io.run_callback( - user_callback_wrapper, cube, field, filename - ) + cube = iris.io.run_callback(user_callback_wrapper, cube, field, filename) # Callback mechanism may return None, which must not be yielded. if cube is None: continue diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index ce9d183586..a75d7b16f4 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -210,12 +210,8 @@ def adjust(dims): return [(coord, adjust(dims)) for coord, dims in coords_and_dims] n_collation_dims = len(collation.vector_dims_shape) - dim_coords_and_dims = _adjust_dims( - dim_coords_and_dims, n_collation_dims - ) - aux_coords_and_dims = _adjust_dims( - aux_coords_and_dims, n_collation_dims - ) + dim_coords_and_dims = _adjust_dims(dim_coords_and_dims, n_collation_dims) + aux_coords_and_dims = _adjust_dims(aux_coords_and_dims, n_collation_dims) # Dimensions to which we've already assigned dimension coordinates. dim_coord_dims = set() @@ -299,15 +295,11 @@ def key_func(item): ) ) if len(dims) > 1: - raise TranslationError( - "Unsupported multiple values for vertical " "dimension." 
- ) + raise TranslationError("Unsupported multiple values for vertical dimension.") if dims: v_dims = dims.pop() if len(v_dims) > 1: - raise TranslationError( - "Unsupported multi-dimension vertical " "headers." - ) + raise TranslationError("Unsupported multi-dimension vertical headers.") else: v_dims = () coords_and_dims, factories = _convert_vertical_coords( diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 2a41cf99ba..44a9520c8b 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -17,9 +17,7 @@ import numpy as np from iris._lazy_data import as_lazy_data, multidim_lazy_stack -from iris.fileformats.um._optimal_array_structuring import ( - optimal_array_structure, -) +from iris.fileformats.um._optimal_array_structuring import optimal_array_structure class BasicFieldCollation: @@ -74,9 +72,7 @@ def data(self): self._calculate_structure() if self._data_cache is None: stack = np.empty(self.vector_dims_shape, "object") - for nd_index, field in zip( - np.ndindex(self.vector_dims_shape), self.fields - ): + for nd_index, field in zip(np.ndindex(self.vector_dims_shape), self.fields): stack[nd_index] = as_lazy_data(field._data) self._data_cache = multidim_lazy_stack(stack) return self._data_cache @@ -86,9 +82,7 @@ def core_data(self): @property def realised_dtype(self): - return np.result_type( - *[field.realised_dtype for field in self._fields] - ) + return np.result_type(*[field.realised_dtype for field in self._fields]) @property def data_proxy(self): @@ -230,10 +224,7 @@ def _calculate_structure(self): # Flatten out the array apart from the last dimension, # convert to cftime objects, then reshape back. 
arr = np.array( - [ - cftime.datetime(*args) - for args in arr.reshape(-1, extra_length) - ] + [cftime.datetime(*args) for args in arr.reshape(-1, extra_length)] ).reshape(arr_shape) vector_element_arrays_and_dims[name] = (arr, dims) diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index 3fd892808b..b3a8bdc40d 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -123,9 +123,7 @@ def optimal_array_structure(ordering_elements, actual_values_elements=None): } # Calculate the basic fields-group array structure. - base_structure = GroupStructure.from_component_arrays( - element_ordering_arrays - ) + base_structure = GroupStructure.from_component_arrays(element_ordering_arrays) # Work out the target cube structure. target_structure = _optimal_dimensioning_structure( @@ -139,9 +137,7 @@ def optimal_array_structure(ordering_elements, actual_values_elements=None): elements_length = len(ordering_elements[0][1]) vector_dims_shape = (elements_length,) else: - vector_dims_shape = tuple( - struct.size for (_, struct) in target_structure - ) + vector_dims_shape = tuple(struct.size for (_, struct) in target_structure) # Build arrays of element values mapped onto the vectorised dimensions. 
elements_and_dimensions = base_structure.build_arrays( diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 6dde73fb68..391c4bb6f5 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -26,9 +26,7 @@ def __setitem__(self, key, value): raise ValueError("A saver already exists for", key) for k in self.keys(): if k.endswith(key) or key.endswith(k): - raise ValueError( - "key %s conflicts with existing key %s" % (key, k) - ) + raise ValueError("key %s conflicts with existing key %s" % (key, k)) dict.__setitem__(self, key, value) @@ -75,9 +73,7 @@ def run_callback(callback, cube, field, filename): if result is None: result = cube elif not isinstance(result, Cube): - raise TypeError( - "Callback function returned an " "unhandled data type." - ) + raise TypeError("Callback function returned an unhandled data type.") return result @@ -165,17 +161,13 @@ def expand_filespecs(file_specs, files_expected=True): """ # Remove any hostname component - currently unused filenames = [ - os.path.abspath( - os.path.expanduser(fn[2:] if fn.startswith("//") else fn) - ) + os.path.abspath(os.path.expanduser(fn[2:] if fn.startswith("//") else fn)) for fn in file_specs ] if files_expected: # Try to expand all filenames as globs - glob_expanded = OrderedDict( - [[fn, sorted(glob.glob(fn))] for fn in filenames] - ) + glob_expanded = OrderedDict([[fn, sorted(glob.glob(fn))] for fn in filenames]) # If any of the specs expanded to an empty list then raise an error all_expanded = glob_expanded.values() @@ -187,9 +179,7 @@ def expand_filespecs(file_specs, files_expected=True): pattern, len(expanded) ) else: - msg += '\n * "{}" didn\'t match any files'.format( - pattern - ) + msg += '\n * "{}" didn\'t match any files'.format(pattern) raise IOError(msg) result = [fname for fnames in all_expanded for fname in fnames] else: @@ -218,18 +208,14 @@ def load_files(filenames, callback, constraints=None): handler_map = collections.defaultdict(list) for fn in all_file_paths: 
with open(fn, "rb") as fh: - handling_format_spec = FORMAT_AGENT.get_spec( - os.path.basename(fn), fh - ) + handling_format_spec = FORMAT_AGENT.get_spec(os.path.basename(fn), fh) handler_map[handling_format_spec].append(fn) # Call each iris format handler with the appropriate filenames for handling_format_spec in sorted(handler_map): fnames = handler_map[handling_format_spec] if handling_format_spec.constraint_aware_handler: - for cube in handling_format_spec.handler( - fnames, callback, constraints - ): + for cube in handling_format_spec.handler(fnames, callback, constraints): yield cube else: for cube in handling_format_spec.handler(fnames, callback): @@ -314,8 +300,7 @@ def _grib_save(cube, target, append=False, **kwargs): from iris_grib import save_grib2 except ImportError: raise RuntimeError( - "Unable to save GRIB file - " - '"iris_grib" package is not installed.' + "Unable to save GRIB file - " '"iris_grib" package is not installed.' ) save_grib2(cube, target, append, **kwargs) @@ -352,10 +337,7 @@ def add_saver(file_extension, new_saver): """ # Make sure it's a func with 2+ args - if ( - not hasattr(new_saver, "__call__") - or new_saver.__code__.co_argcount < 2 - ): + if not hasattr(new_saver, "__call__") or new_saver.__code__.co_argcount < 2: raise ValueError("Saver routines must be callable with 2+ arguments.") # Try to add this saver. Invalid keys will be rejected. @@ -494,8 +476,7 @@ def save(source, target, saver=None, **kwargs): # CubeList or sequence of cubes? elif isinstance(source, CubeList) or ( - isinstance(source, (list, tuple)) - and all([isinstance(i, Cube) for i in source]) + isinstance(source, (list, tuple)) and all([isinstance(i, Cube) for i in source]) ): # Only allow cubelist saving for those fileformats that are capable. 
if "iris.fileformats.netcdf" not in saver.__module__: diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index da64345cf3..ca205d01f5 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -198,8 +198,7 @@ def __init__( """ if not isinstance(file_element, FileElement): raise ValueError( - "file_element must be an instance of FileElement, got %r" - % file_element + "file_element must be an instance of FileElement, got %r" % file_element ) self._file_element = file_element diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index cd950828be..e85c670433 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -113,9 +113,7 @@ def izip(*cubes, **kwargs): # Loop over dimensioned coords in each cube. for dim in range(len(cube.shape)): if dim not in requested_dims: - dimensioned_iter_coords.update( - cube.coords(contains_dimension=dim) - ) + dimensioned_iter_coords.update(cube.coords(contains_dimension=dim)) dimensioned_iter_coords_by_cube.append(dimensioned_iter_coords) # Check for multidimensional coords - current implementation cannot @@ -169,9 +167,7 @@ def izip(*cubes, **kwargs): category=IrisUserWarning, ) - return _ZipSlicesIterator( - cubes, requested_dims_by_cube, ordered, coords_by_cube - ) + return _ZipSlicesIterator(cubes, requested_dims_by_cube, ordered, coords_by_cube) class _ZipSlicesIterator(Iterator): @@ -192,12 +188,11 @@ def __init__(self, cubes, requested_dims_by_cube, ordered, coords_by_cube): # mapping of values (itertool.izip won't catch this). if len(requested_dims_by_cube) != len(cubes): raise ValueError( - "requested_dims_by_cube parameter is not the same" - " length as cubes." + "requested_dims_by_cube parameter is not the same length as cubes." ) if len(coords_by_cube) != len(cubes): raise ValueError( - "coords_by_cube parameter is not the same length " "as cubes." + "coords_by_cube parameter is not the same length as cubes." 
) # Create an all encompassing dims_index called master_dims_index that @@ -222,9 +217,7 @@ def __init__(self, cubes, requested_dims_by_cube, ordered, coords_by_cube): # Loop over coords in this dimension (could be just one). for coord in cube_coords: # Search for coord in master_dimensioned_coord_list. - for j, master_coords in enumerate( - master_dimensioned_coord_list - ): + for j, master_coords in enumerate(master_dimensioned_coord_list): # Use coord wrapper with desired equality # functionality. if _CoordWrapper(coord) in master_coords: diff --git a/lib/iris/palette.py b/lib/iris/palette.py index f640cf5687..7bfde5b6b6 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -300,12 +300,12 @@ def _load_palette(): # Integrity check for meta-data 'type' field. assert cmap_type is not None, ( - 'Missing meta-data "type" keyword for color map file, "%s"' - % filename + 'Missing meta-data "type" keyword for color map file, "%s"' % filename + ) + assert cmap_type == "rgb", 'Invalid type [%s] for color map file "%s"' % ( + cmap_type, + filename, ) - assert ( - cmap_type == "rgb" - ), 'Invalid type [%s] for color map file "%s"' % (cmap_type, filename) # Update the color map look-up dictionaries. CMAP_BREWER.add(cmap_name) @@ -330,9 +330,7 @@ def _load_palette(): if interpolate_flag: # Perform default color map interpolation for quantization # levels per primary color. - cmap = mpl_colors.LinearSegmentedColormap.from_list( - cmap_name, cmap_data - ) + cmap = mpl_colors.LinearSegmentedColormap.from_list(cmap_name, cmap_data) else: # Restrict quantization levels per primary color (turn-off # interpolation). 
diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 05447c0c48..31951c8537 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -112,10 +112,7 @@ def _series_index_unique(pandas_series: pandas.Series): else: result = None levels_combinations = chain( - *[ - combinations(levels_range, levels + 1) - for levels in levels_range - ] + *[combinations(levels_range, levels + 1) for levels in levels_range] ) for lc in levels_combinations: if pandas_series.groupby(level=lc).nunique().max() == 1: @@ -171,8 +168,7 @@ def as_cube( calendars = calendars or {} if pandas_array.ndim not in [1, 2]: raise ValueError( - "Only 1D or 2D Pandas arrays " - "can currently be converted to Iris cubes." + "Only 1D or 2D Pandas arrays can currently be converted to Iris cubes." ) # Make the copy work consistently across NumPy 1.6 and 1.7. @@ -182,9 +178,7 @@ def as_cube( order = "C" if copy else "A" data = np.array(pandas_array, copy=copy, order=order) cube = Cube(np.ma.masked_invalid(data, copy=False)) - _add_iris_coord( - cube, "index", pandas_array.index, 0, calendars.get(0, None) - ) + _add_iris_coord(cube, "index", pandas_array.index, 0, calendars.get(0, None)) if pandas_array.ndim == 2: _add_iris_coord( cube, @@ -384,8 +378,7 @@ def as_cubes( raise ValueError(message) if not ( - pandas_index.is_monotonic_increasing - or pandas_index.is_monotonic_decreasing + pandas_index.is_monotonic_increasing or pandas_index.is_monotonic_decreasing ): # Need monotonic index for use in DimCoord(s). # This function doesn't sort_index itself since that breaks the @@ -412,9 +405,7 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # Common convenience to get the right DM in the right format for # Cube creation. calendar = calendars.get(name_) - instance = _get_dimensional_metadata( - name_, values_, calendar, dm_class_ - ) + instance = _get_dimensional_metadata(name_, values_, calendar, dm_class_) return (instance, dimensions_) # DimCoords. 
@@ -519,9 +510,7 @@ def _get_base(array): base = _get_base(values) np_base = _get_base(np_obj) if base is not np_base: - msg = "Pandas {} does not share memory".format( - type(pandas_obj).__name__ - ) + msg = "Pandas {} does not share memory".format(type(pandas_obj).__name__) raise AssertionError(msg) @@ -531,9 +520,7 @@ def _make_dim_coord_list(cube): for dimn in range(cube.ndim): dimn_coord = cube.coords(dimensions=dimn, dim_coords=True) if dimn_coord: - outlist += [ - [dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])] - ] + outlist += [[dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])]] else: outlist += [[f"dim{dimn}", range(cube.shape[dimn])]] return list(zip(*outlist)) @@ -870,9 +857,7 @@ def merge_metadata(meta_var_list): coord_names, coords = _make_dim_coord_list(cube) # Make base DataFrame index = pandas.MultiIndex.from_product(coords, names=coord_names) - data_frame = pandas.DataFrame( - data.ravel(), columns=[cube.name()], index=index - ) + data_frame = pandas.DataFrame(data.ravel(), columns=[cube.name()], index=index) if add_aux_coords: data_frame = merge_metadata(_make_aux_coord_list(cube)) diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 977cbbcfc2..b432ed7620 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -163,9 +163,7 @@ def guess_axis(coord): if coord is None: aux_coords = cube.coords(dimensions=dim) aux_coords = [ - coord - for coord in aux_coords - if isinstance(coord, iris.coords.DimCoord) + coord for coord in aux_coords if isinstance(coord, iris.coords.DimCoord) ] if aux_coords: aux_coords.sort(key=lambda coord: coord.metadata) @@ -173,15 +171,11 @@ def guess_axis(coord): # If plotting a 2 dimensional plot, check for 2d coordinates if ndims == 2: - missing_dims = [ - dim for dim, coord in enumerate(coords) if coord is None - ] + missing_dims = [dim for dim, coord in enumerate(coords) if coord is None] if missing_dims: # Note that this only picks up coordinates that span the dims two_dim_coords = 
cube.coords(dimensions=missing_dims) - two_dim_coords = [ - coord for coord in two_dim_coords if coord.ndim == 2 - ] + two_dim_coords = [coord for coord in two_dim_coords if coord.ndim == 2] if len(two_dim_coords) >= 2: two_dim_coords.sort(key=lambda coord: coord.metadata) coords = two_dim_coords[:2] @@ -195,10 +189,7 @@ def guess_axis(coord): axis = "Z" if axis in axes: for coord in cube.coords(dim_coords=False): - if ( - max(coord.shape) > 1 - and iris.util.guess_coord_axis(coord) == axis - ): + if max(coord.shape) > 1 and iris.util.guess_coord_axis(coord) == axis: coords[axes.index(axis)] = coord # Re-order the coordinates to achieve the preferred @@ -222,9 +213,7 @@ def sort_key(coord): def _can_draw_map(coords): std_names = [ - c and c.standard_name - for c in coords - if isinstance(c, iris.coords.Coord) + c and c.standard_name for c in coords if isinstance(c, iris.coords.Coord) ] valid_std_names = [ ["latitude", "longitude"], @@ -392,9 +381,7 @@ def _draw_2d_from_bounds(draw_method_name, cube, *args, **kwargs): # Get & remove the coords entry from kwargs. coords = kwargs.pop("coords", None) if coords is not None: - plot_defn = _get_plot_defn_custom_coords_picked( - cube, coords, mode, ndims=2 - ) + plot_defn = _get_plot_defn_custom_coords_picked(cube, coords, mode, ndims=2) else: plot_defn = _get_plot_defn(cube, mode, ndims=2) @@ -402,9 +389,7 @@ def _draw_2d_from_bounds(draw_method_name, cube, *args, **kwargs): for coord in plot_defn.coords: if hasattr(coord, "has_bounds") and coord.has_bounds(): - _check_bounds_contiguity_and_mask( - coord, data=cube.data, atol=contig_tol - ) + _check_bounds_contiguity_and_mask(coord, data=cube.data, atol=contig_tol) if _can_draw_map(plot_defn.coords): result = _map_common( @@ -546,8 +531,7 @@ def _draw_2d_from_points(draw_method_name, arg_func, cube, *args, **kwargs): if values.dtype.char in "SU": if values.ndim != 1: raise ValueError( - "Multi-dimensional string coordinates " - "not supported." 
+ "Multi-dimensional string coordinates not supported." ) plot_arrays.append(np.arange(values.size)) string_axes[axis_name] = values @@ -595,8 +579,7 @@ def _fixup_dates(coord, values): raise IrisError(msg) r = [ - cftime.datetime(*date, calendar=coord.units.calendar) - for date in dates + cftime.datetime(*date, calendar=coord.units.calendar) for date in dates ] values = np.empty(len(r), dtype=object) @@ -637,9 +620,7 @@ def _u_object_from_v_object(v_object): def _get_plot_objects(args): - if len(args) > 2 and isinstance( - args[2], (iris.cube.Cube, iris.coords.Coord) - ): + if len(args) > 2 and isinstance(args[2], (iris.cube.Cube, iris.coords.Coord)): # three arguments u_object, v_object1, v_object2 = args[:3] u1, v1 = _uv_from_u_object_v_object(u_object, v_object1) @@ -660,9 +641,7 @@ def _get_plot_objects(args): u = u1 v = (v1, v2) v_object = (v_object1, v_object2) - elif len(args) > 1 and isinstance( - args[1], (iris.cube.Cube, iris.coords.Coord) - ): + elif len(args) > 1 and isinstance(args[1], (iris.cube.Cube, iris.coords.Coord)): # two arguments u_object, v_object = args[:2] u, v = _uv_from_u_object_v_object(u_object, v_object) @@ -711,9 +690,7 @@ def _get_geodesic_params(globe): semimajor = globe.semiminor_axis / (1.0 - flattening) elif flattening is None: if globe.semiminor_axis is not None: - flattening = (semimajor - globe.semiminor_axis) / float( - semimajor - ) + flattening = (semimajor - globe.semiminor_axis) / float(semimajor) else: # Has inverse flattening or raises error flattening = 1.0 / globe.inverse_flattening @@ -812,9 +789,7 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): ): # Replace non-cartopy subplot/axes with a cartopy alternative and set # the transform keyword. 
- kwargs = _ensure_cartopy_axes_and_determine_kwargs( - u_object, v_object, kwargs - ) + kwargs = _ensure_cartopy_axes_and_determine_kwargs(u_object, v_object, kwargs) if draw_method_name == "plot" and u_object.standard_name not in ( "projection_x_coordinate", "projection_y_coordinate", @@ -866,9 +841,7 @@ def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): ): # Replace non-cartopy subplot/axes with a cartopy alternative and set # the transform keyword. - kwargs = _ensure_cartopy_axes_and_determine_kwargs( - u_object, v_object1, kwargs - ) + kwargs = _ensure_cartopy_axes_and_determine_kwargs(u_object, v_object1, kwargs) axes = kwargs.pop("axes", None) draw_method = getattr(axes if axes else plt, draw_method_name) @@ -926,9 +899,7 @@ def _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs): """ # Determine projection. if x_coord.coord_system != y_coord.coord_system: - raise ValueError( - "The X and Y coordinates must have equal coordinate" " systems." - ) + raise ValueError("The X and Y coordinates must have equal coordinate systems.") cs = x_coord.coord_system if cs is not None: cartopy_proj = cs.as_cartopy_projection() @@ -955,7 +926,7 @@ def _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs): _replace_axes_with_cartopy_axes(cartopy_proj) elif axes and not isinstance(axes, cartopy.mpl.geoaxes.GeoAxes): raise TypeError( - "The supplied axes instance must be a cartopy " "GeoAxes instance." + "The supplied axes instance must be a cartopy GeoAxes instance." ) # Set the "from transform" keyword. @@ -986,9 +957,7 @@ def _check_geostationary_coords_and_convert(x, y, kwargs): return x, y -def _map_common( - draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwargs -): +def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwargs): """ Draw the given cube on a map using its points or bounds. @@ -1040,9 +1009,7 @@ def _map_common( # placed in the CS. 
if getattr(x_coord, "circular", False): original_length = y.shape[1] - _, direction = iris.util.monotonic( - x_coord.points, return_direction=True - ) + _, direction = iris.util.monotonic(x_coord.points, return_direction=True) y = np.append(y, y[:, 0:1], axis=1) x = np.append(x, x[:, 0:1] + 360 * direction, axis=1) data = ma.concatenate([data, data[:, 0:1]], axis=1) @@ -1057,16 +1024,12 @@ def _map_common( if val_arr.ndim >= 2 and val_arr.shape[1] == original_length: # Concatenate the first column to the end of the data then # update kwargs - val_arr = ma.concatenate( - [val_arr, val_arr[:, 0:1, ...]], axis=1 - ) + val_arr = ma.concatenate([val_arr, val_arr[:, 0:1, ...]], axis=1) kwargs[key] = val_arr # Replace non-cartopy subplot/axes with a cartopy alternative and set the # transform keyword. - kwargs = _ensure_cartopy_axes_and_determine_kwargs( - x_coord, y_coord, kwargs - ) + kwargs = _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs) # Make Geostationary coordinates plot-able. 
x, y = _check_geostationary_coords_and_convert(x, y, kwargs) @@ -1246,9 +1209,7 @@ def _fill_orography(cube, coords, mode, vert_plot, horiz_plot, style_args): orography = cube.coord("surface_altitude") if coords is not None: - plot_defn = _get_plot_defn_custom_coords_picked( - cube, coords, mode, ndims=2 - ) + plot_defn = _get_plot_defn_custom_coords_picked(cube, coords, mode, ndims=2) else: plot_defn = _get_plot_defn(cube, mode, ndims=2) v_coord, u_coord = plot_defn.coords @@ -1488,9 +1449,7 @@ def points(cube, *args, **kwargs): def _scatter_args(u, v, data, *args, **kwargs): return ((u, v) + args, kwargs) - return _draw_2d_from_points( - "scatter", _scatter_args, cube, *args, **kwargs - ) + return _draw_2d_from_points("scatter", _scatter_args, cube, *args, **kwargs) def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): @@ -1772,9 +1731,7 @@ def fill_between(x, y1, y2, *args, **kwargs): raise TypeError("y2 must be a cube or a coordinate.") args = (x, y1, y2) + args _plot_args = None - return _draw_two_1d_from_points( - "fill_between", _plot_args, *args, **kwargs - ) + return _draw_two_1d_from_points("fill_between", _plot_args, *args, **kwargs) def hist(x, *args, **kwargs): @@ -1806,8 +1763,7 @@ def hist(x, *args, **kwargs): data = x._values else: raise TypeError( - "x must be a cube, coordinate, cell measure or " - "ancillary variable." + "x must be a cube, coordinate, cell measure or ancillary variable." ) return plt.hist(data, *args, **kwargs) @@ -2008,19 +1964,13 @@ def update_animation_iris(i, cubes, vmin, vmax, coords): supported = ["iris.plot", "iris.quickplot"] if plot_func.__module__ not in supported: - msg = ( - 'Given plotting module "{}" may not be supported, intended ' - "use: {}." - ) + msg = 'Given plotting module "{}" may not be supported, intended ' "use: {}." 
msg = msg.format(plot_func.__module__, supported) warnings.warn(msg, category=IrisUnsupportedPlottingWarning) supported = ["contour", "contourf", "pcolor", "pcolormesh"] if plot_func.__name__ not in supported: - msg = ( - 'Given plotting function "{}" may not be supported, intended ' - "use: {}." - ) + msg = 'Given plotting function "{}" may not be supported, intended ' "use: {}." msg = msg.format(plot_func.__name__, supported) warnings.warn(msg, category=IrisUnsupportedPlottingWarning) diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 15f4cf11e2..6523959420 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -62,9 +62,7 @@ def _label(cube, mode, result=None, ndims=2, coords=None, axes=None): bar = plt.colorbar( result, ax=axes, orientation="horizontal", drawedges=draw_edges ) - has_known_units = not ( - cube.units.is_unknown() or cube.units.is_no_unit() - ) + has_known_units = not (cube.units.is_unknown() or cube.units.is_no_unit()) if has_known_units and cube.units != cf_units.Unit("1"): # Use shortest unit representation for anything other than time if _use_symbol(cube.units): @@ -89,10 +87,7 @@ def _label(cube, mode, result=None, ndims=2, coords=None, axes=None): axes.set_xlabel(_title(plot_defn.coords[0], with_units=True)) axes.set_ylabel(_title(cube, with_units=True)) else: - msg = ( - "Unexpected number of dimensions ({}) given to " - "_label.".format(ndims) - ) + msg = "Unexpected number of dimensions ({}) given to _label.".format(ndims) raise ValueError(msg) @@ -130,9 +125,7 @@ def _label_1d_plot(*args, **kwargs): axes = kwargs.pop("axes", None) if len(kwargs) != 0: - msg = "Unexpected kwargs {} given to _label_1d_plot".format( - kwargs.keys() - ) + msg = "Unexpected kwargs {} given to _label_1d_plot".format(kwargs.keys()) raise ValueError(msg) if axes is None: diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index 2a8f447f3a..ce9ee51771 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -48,9 +48,7 @@ def 
_make_merged_patch(paths): all_vertices.shape = (total_len, 2) - return PathPatch( - Path(all_vertices, all_codes), facecolor="black", edgecolor="none" - ) + return PathPatch(Path(all_vertices, all_codes), facecolor="black", edgecolor="none") def _ring_path(): @@ -90,9 +88,7 @@ def _slot_path(): # removed. circle = Path.unit_circle() vertical_bar = _vertical_bar_path() - vertices = np.concatenate( - [circle.vertices[:-1], vertical_bar.vertices[-2::-1]] - ) + vertices = np.concatenate([circle.vertices[:-1], vertical_bar.vertices[-2::-1]]) codes = np.concatenate([circle.codes[:-1], vertical_bar.codes[:-1]]) return Path(vertices, codes) diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 83fdb6af89..d4ea42f8d9 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -124,16 +124,12 @@ def main(): 11, " NOTE: To compare results of failing tests, ", ) - lines.insert( - 12, " use idiff.py instead" - ) + lines.insert(12, " use idiff.py instead") lines.insert( 13, " --data-files-used Save a list of files used to a temporary file", ) - lines.insert( - 14, " -m Create missing test results" - ) + lines.insert(14, " -m Create missing test results") print("\n".join(lines)) else: unittest.main() @@ -445,9 +441,7 @@ def assertCML(self, cubes, reference_filename=None, checksum=True): reference_path = self.get_result_path(reference_filename) self._check_same(xml, reference_path) - def assertTextFile( - self, source_filename, reference_filename, desc="text file" - ): + def assertTextFile(self, source_filename, reference_filename, desc="text file"): """Check if two text files are the same, printing any diffs.""" with open(source_filename) as source_file: source_text = source_file.readlines() @@ -477,9 +471,7 @@ def assertDataAlmostEqual(self, data, reference_filename, **kwargs): with open(reference_path, "r") as reference_file: stats = json.load(reference_file) self.assertEqual(stats.get("shape", []), list(data.shape)) - self.assertEqual( - 
stats.get("masked", False), ma.is_masked(data) - ) + self.assertEqual(stats.get("masked", False), ma.is_masked(data)) nstats = np.array( ( stats.get("mean", 0.0), @@ -549,9 +541,7 @@ def assertString(self, string, reference_filename=None): reference_path = self.result_path(None, "txt") else: reference_path = self.get_result_path(reference_filename) - self._check_same( - string, reference_path, type_comparison_name="Strings" - ) + self._check_same(string, reference_path, type_comparison_name="Strings") def assertRepr(self, obj, reference_filename): self.assertString(repr(obj), reference_filename) @@ -562,9 +552,7 @@ def _check_same(self, item, reference_path, type_comparison_name="CML"): reference = "".join( part.decode("utf-8") for part in reference_fh.readlines() ) - self._assert_str_same( - reference, item, reference_path, type_comparison_name - ) + self._assert_str_same(reference, item, reference_path, type_comparison_name) else: self._ensure_folder(reference_path) with open(reference_path, "wb") as reference_fh: @@ -583,9 +571,7 @@ def assertXMLElement(self, obj, reference_filename): doc = iris.cube.Cube._sort_xml_attrs(doc) pretty_xml = doc.toprettyxml(indent=" ") reference_path = self.get_result_path(reference_filename) - self._check_same( - pretty_xml, reference_path, type_comparison_name="XML" - ) + self._check_same(pretty_xml, reference_path, type_comparison_name="XML") def assertArrayEqual(self, a, b, err_msg=""): np.testing.assert_array_equal(a, b, err_msg=err_msg) @@ -652,9 +638,7 @@ def assertNoWarningsRegexp(self, expected_regexp=""): def assertArrayAlmostEqual(self, a, b, decimal=6): np.testing.assert_array_almost_equal(a, b, decimal=decimal) - assertMaskedArrayAlmostEqual = staticmethod( - assert_masked_array_almost_equal - ) + assertMaskedArrayAlmostEqual = staticmethod(assert_masked_array_almost_equal) def assertArrayAllClose(self, a, b, rtol=1.0e-7, atol=1.0e-8, **kwargs): """ @@ -771,9 +755,7 @@ def _unique_id(self): def 
_check_reference_file(self, reference_path): reference_exists = os.path.isfile(reference_path) - if not ( - reference_exists or os.environ.get("IRIS_TEST_CREATE_MISSING") - ): + if not (reference_exists or os.environ.get("IRIS_TEST_CREATE_MISSING")): msg = "Missing test result: {}".format(reference_path) raise AssertionError(msg) return reference_exists @@ -863,15 +845,11 @@ def assertDictEqual(self, lhs, rhs, msg=None): """ if not isinstance(lhs, Mapping): - emsg = ( - f"Provided LHS argument is not a 'Mapping', got {type(lhs)}." - ) + emsg = f"Provided LHS argument is not a 'Mapping', got {type(lhs)}." self.fail(emsg) if not isinstance(rhs, Mapping): - emsg = ( - f"Provided RHS argument is not a 'Mapping', got {type(rhs)}." - ) + emsg = f"Provided RHS argument is not a 'Mapping', got {type(rhs)}." self.fail(emsg) if set(lhs.keys()) != set(rhs.keys()): @@ -899,9 +877,7 @@ def assertDictEqual(self, lhs, rhs, msg=None): raise AssertionError(emsg) self.assertMaskedArrayEqual(lvalue, rvalue) - elif isinstance(lvalue, np.ndarray) or isinstance( - rvalue, np.ndarray - ): + elif isinstance(lvalue, np.ndarray) or isinstance(rvalue, np.ndarray): if not isinstance(lvalue, np.ndarray): emsg = ( f"Dictionary key {key!r} values are not equal, " @@ -930,9 +906,7 @@ def assertDictEqual(self, lhs, rhs, msg=None): def assertEqualAndKind(self, value, expected): # Check a value, and also its type 'kind' = float/integer/string. self.assertEqual(value, expected) - self.assertEqual( - np.array(value).dtype.kind, np.array(expected).dtype.kind - ) + self.assertEqual(np.array(value).dtype.kind, np.array(expected).dtype.kind) get_data_path = IrisTest.get_data_path @@ -960,9 +934,7 @@ class MyDataTests(tests.IrisTest): or os.environ.get("IRIS_TEST_NO_DATA") ) - skip = unittest.skipIf( - condition=no_data, reason="Test(s) require external data." 
- ) + skip = unittest.skipIf(condition=no_data, reason="Test(s) require external data.") return skip(fn) @@ -978,9 +950,7 @@ class MyGeoTiffTests(test.IrisTest): ... """ - skip = unittest.skipIf( - condition=not GDAL_AVAILABLE, reason="Test requires 'gdal'." - ) + skip = unittest.skipIf(condition=not GDAL_AVAILABLE, reason="Test requires 'gdal'.") return skip(fn) @@ -1025,7 +995,7 @@ def wrapped(self, *args, **kwargs): self.assertEqual( 0, warn.call_count, - ("Got unexpected warnings." " \n{}".format(warn.call_args_list)), + ("Got unexpected warnings.\n{}".format(warn.call_args_list)), ) return result diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 9190548b15..10723f1291 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -61,9 +61,7 @@ def _subsampled_coord(coord, subsamplefactor): raise ValueError("The coordinate must have bounds.") new_coord = coord[::subsamplefactor] new_bounds = new_coord.bounds.copy() - new_bounds[:, 1] = coord.bounds[ - (subsamplefactor - 1) :: subsamplefactor, 1 - ] + new_bounds[:, 1] = coord.bounds[(subsamplefactor - 1) :: subsamplefactor, 1] new_bounds[-1, 1] = coord.bounds[-1, 1] new_coord = coord.copy(points=new_coord.points, bounds=new_bounds) return new_coord @@ -109,9 +107,7 @@ def _resampled_coord(coord, samplefactor): lower = lower + delta upper = upper - delta samples = int(len(bounds) * samplefactor) - new_points, step = np.linspace( - lower, upper, samples, endpoint=False, retstep=True - ) + new_points, step = np.linspace(lower, upper, samples, endpoint=False, retstep=True) new_points += step * 0.5 new_coord = coord.copy(points=new_points) new_coord.guess_bounds() @@ -477,9 +473,7 @@ def test_cross_section(self): 
dest.add_dim_coord(lon, 1) dest.add_aux_coord(src.coord("grid_latitude").copy(), None) res = regrid_area_weighted(src, dest) - self.assertCMLApproxData( - res, RESULT_DIR + ("const_lat_cross_section.cml",) - ) + self.assertCMLApproxData(res, RESULT_DIR + ("const_lat_cross_section.cml",)) # Constant latitude, data order [x, z] # Using original and transposing the result should give the # same answer. @@ -487,9 +481,7 @@ def test_cross_section(self): dest.transpose() res = regrid_area_weighted(src, dest) res.transpose() - self.assertCMLApproxData( - res, RESULT_DIR + ("const_lat_cross_section.cml",) - ) + self.assertCMLApproxData(res, RESULT_DIR + ("const_lat_cross_section.cml",)) # Constant longitude src = self.realistic_cube[0, :, :, 10] @@ -501,9 +493,7 @@ def test_cross_section(self): dest.add_dim_coord(lat, 1) dest.add_aux_coord(src.coord("grid_longitude").copy(), None) res = regrid_area_weighted(src, dest) - self.assertCMLApproxData( - res, RESULT_DIR + ("const_lon_cross_section.cml",) - ) + self.assertCMLApproxData(res, RESULT_DIR + ("const_lon_cross_section.cml",)) # Constant longitude, data order [y, z] # Using original and transposing the result should give the # same answer. 
@@ -511,9 +501,7 @@ def test_cross_section(self): dest.transpose() res = regrid_area_weighted(src, dest) res.transpose() - self.assertCMLApproxData( - res, RESULT_DIR + ("const_lon_cross_section.cml",) - ) + self.assertCMLApproxData(res, RESULT_DIR + ("const_lon_cross_section.cml",)) def test_scalar_source_cube(self): src = self.simple_cube[1, 2] diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py index 2c7bad59ff..e8ac3f1db4 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -32,9 +32,7 @@ from iris.experimental.regrid_conservative import regrid_conservative_via_esmpy import iris.tests.stock as istk -_PLAIN_GEODETIC_CS = iris.coord_systems.GeogCS( - i_cartog.DEFAULT_SPHERICAL_EARTH_RADIUS -) +_PLAIN_GEODETIC_CS = iris.coord_systems.GeogCS(i_cartog.DEFAULT_SPHERICAL_EARTH_RADIUS) def _make_test_cube(shape, xlims, ylims, pole_latlon=None): @@ -83,9 +81,7 @@ def _make_test_cube(shape, xlims, ylims, pole_latlon=None): def _cube_area_sum(cube): """Calculate total area-sum - Iris can't do this in one operation.""" area_sums = cube * i_cartog.area_weights(cube, normalize=False) - area_sum = area_sums.collapsed( - area_sums.coords(dim_coords=True), iris.analysis.SUM - ) + area_sum = area_sums.collapsed(area_sums.coords(dim_coords=True), iris.analysis.SUM) return area_sum.data.flatten()[0] @@ -504,9 +500,7 @@ def test_longitude_wraps(self): ylim2 = 90.0 * (shape2[1] - 1) / shape2[1] xlims_2 = (-xlim2, xlim2) ylims_2 = (-ylim2, ylim2) - c2 = _make_test_cube( - shape2, xlims_2, ylims_2, pole_latlon=(47.4, 25.7) - ) + c2 = _make_test_cube(shape2, xlims_2, ylims_2, pole_latlon=(47.4, 25.7)) # Perform regridding c1toc2 = regrid_conservative_via_esmpy(c1, c2) @@ -622,18 +616,14 @@ def test_fail_different_cs(self): regrid_conservative_via_esmpy(c1, c2) # 
Replace the coord_system one of the source coords + check this fails. - c1.coord("grid_longitude").coord_system = c2.coord( - "longitude" - ).coord_system + c1.coord("grid_longitude").coord_system = c2.coord("longitude").coord_system with self.assertRaises(ValueError): regrid_conservative_via_esmpy(c1, c2) # Repeat with target coordinate fiddled. c1 = _make_test_cube(shape1, xlims1, ylims1, pole_latlon=(45.0, 35.0)) c2 = _make_test_cube(shape2, xlims2, ylims2) - c2.coord("latitude").coord_system = c1.coord( - "grid_latitude" - ).coord_system + c2.coord("latitude").coord_system = c1.coord("grid_latitude").coord_system with self.assertRaises(ValueError): regrid_conservative_via_esmpy(c1, c2) diff --git a/lib/iris/tests/experimental/test_raster.py b/lib/iris/tests/experimental/test_raster.py index 736263f196..0345ed2595 100644 --- a/lib/iris/tests/experimental/test_raster.py +++ b/lib/iris/tests/experimental/test_raster.py @@ -34,9 +34,7 @@ def check_tiff_header(self, tiff_filename, expect_keys, expect_entries): msg_badval = "Tiff header entry {} has value {} != {}." 
for key, value in expect_entries.items(): content = im.tag[key] - self.assertEqual( - content, value, msg_badval.format(key, content, value) - ) + self.assertEqual(content, value, msg_badval.format(key, content, value)) def check_tiff(self, cube, header_keys, header_items): # Check that the cube saves correctly to TIFF : diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 3c440264f9..c2b39f1ac5 100755 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -84,9 +84,7 @@ def _output_dir() -> Path: - test_output_dir = Path(__file__).parents[1] / Path( - "result_image_comparison" - ) + test_output_dir = Path(__file__).parents[1] / Path("result_image_comparison") if not os.access(test_output_dir, os.W_OK): if not os.access(Path("."), os.W_OK): @@ -222,9 +220,7 @@ def _create_missing(phash: str) -> None: _create_missing(phash) else: figure.savefig(result_path) - msg = ( - "Bad phash {} with hamming distance {} " "for test {}." - ) + msg = "Bad phash {} with hamming distance {} for test {}." 
msg = msg.format(phash, distance, test_id) if _DISPLAY_FIGURES: emsg = "Image comparison would have failed: {}" diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 1c29d4e551..d65adf492b 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -160,9 +160,7 @@ def step_over_diffs(result_dir, display=True): # Creates the diff file when the images aren't identical mcompare.compare_images(reference_image_path, result_path, tol=0) except Exception as e: - if isinstance(e, ValueError) or isinstance( - e, ImageComparisonFailure - ): + if isinstance(e, ValueError) or isinstance(e, ImageComparisonFailure): print(f"Could not compare {result_path}: {e}") continue else: @@ -171,7 +169,9 @@ def step_over_diffs(result_dir, display=True): diff_path = result_dir / Path(f"{result_path.stem}{_POSTFIX_DIFF}") args = phash, reference_image_path, result_path, diff_path if display: - status = f"Image {count_index + 1} of {count}: hamming distance = {distance}" + status = ( + f"Image {count_index + 1} of {count}: hamming distance = {distance}" + ) prefix = test_key, status yield prefix + args else: @@ -181,9 +181,7 @@ def step_over_diffs(result_dir, display=True): if __name__ == "__main__": - default = Path(iris.tests.__file__).parent / Path( - "result_image_comparison" - ) + default = Path(iris.tests.__file__).parent / Path("result_image_comparison") description = "Iris graphic test difference tool." 
formatter_class = argparse.RawTextHelpFormatter parser = argparse.ArgumentParser( diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index cd4c83e9b1..174bc041f0 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -40,9 +40,7 @@ def update_json(baseline_image_dir: Path, dry_run: bool = False): print(key) print(f"\t{old_val} -> {new_val}") else: - difference = hex_to_hash(str(old_val)) - hex_to_hash( - str(new_val) - ) + difference = hex_to_hash(str(old_val)) - hex_to_hash(str(new_val)) if difference > 0: print(key) print(f"\t{old_val} -> {new_val} ({difference})") @@ -52,18 +50,16 @@ def update_json(baseline_image_dir: Path, dry_run: bool = False): if __name__ == "__main__": - default_baseline_image_dir = Path( - iris.tests.IrisTest.get_data_path("images") + default_baseline_image_dir = Path(iris.tests.IrisTest.get_data_path("images")) + description = ( + "Update imagerepo.json based on contents of the baseline image directory" ) - description = "Update imagerepo.json based on contents of the baseline image directory" formatter_class = argparse.RawTextHelpFormatter parser = argparse.ArgumentParser( description=description, formatter_class=formatter_class ) help = "path to iris tests result image directory (default: %(default)s)" - parser.add_argument( - "--image-dir", default=default_baseline_image_dir, help=help - ) + parser.add_argument("--image-dir", default=default_baseline_image_dir, help=help) help = "dry run (don't actually update imagerepo.json)" parser.add_argument("--dry-run", action="store_true", help=help) args = parser.parse_args() diff --git a/lib/iris/tests/integration/analysis/test_area_weighted.py b/lib/iris/tests/integration/analysis/test_area_weighted.py index 49c80d7bba..0fb2abfcb8 100644 --- a/lib/iris/tests/integration/analysis/test_area_weighted.py +++ b/lib/iris/tests/integration/analysis/test_area_weighted.py @@ -17,9 +17,7 
@@ class AreaWeightedTests(tests.IrisTest): def setUp(self): # Prepare a cube and a template - cube_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_xyt.nc"] - ) + cube_file_path = tests.get_data_path(["NetCDF", "regrid", "regrid_xyt.nc"]) self.cube = iris.load_cube(cube_file_path) template_file_path = tests.get_data_path( diff --git a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py index 15f65d52ad..93606b7754 100644 --- a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py @@ -87,9 +87,7 @@ def setUp(self): ] ) - self.derived_coord_name = ( - "sea_surface_height_above_reference_ellipsoid" - ) + self.derived_coord_name = "sea_surface_height_above_reference_ellipsoid" def _check_result(self, cube, expected_result=None, **kwargs): if expected_result is None: @@ -112,16 +110,12 @@ def test_nonlazy_cube_has_lazy_derived(self): # Check same results when key coords are made lazy. 
cube = self.cube self.assertEqual(cube.coord("depth").has_lazy_points(), False) - self.assertEqual( - cube.coord(self.derived_coord_name).has_lazy_points(), True - ) + self.assertEqual(cube.coord(self.derived_coord_name).has_lazy_points(), True) def test_lazy_cube_same_result(self): cube = self._lazy_testcube() self.assertEqual(cube.coord("depth").has_lazy_points(), True) - self.assertEqual( - cube.coord(self.derived_coord_name).has_lazy_points(), True - ) + self.assertEqual(cube.coord(self.derived_coord_name).has_lazy_points(), True) self._check_result(cube) def test_transpose(self): diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index 9bd6bcb0c5..ae32f55e82 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -89,16 +89,10 @@ class Test_cubes_with_aux_coord(tests.IrisTest): def create_cube(self): data = np.arange(4).reshape(2, 2) - lat = iris.coords.DimCoord( - [0, 30], standard_name="latitude", units="degrees" - ) - lon = iris.coords.DimCoord( - [0, 15], standard_name="longitude", units="degrees" - ) + lat = iris.coords.DimCoord([0, 30], standard_name="latitude", units="degrees") + lon = iris.coords.DimCoord([0, 15], standard_name="longitude", units="degrees") height = iris.coords.AuxCoord([1.5], standard_name="height", units="m") - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="standard" - ) + t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00", calendar="standard") time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") @@ -132,18 +126,10 @@ class Test_cubes_with_cell_measure(tests.IrisTest): def create_cube(self): data = np.arange(4).reshape(2, 2) - lat = iris.coords.DimCoord( - [0, 30], standard_name="latitude", units="degrees" - ) - volume = iris.coords.CellMeasure( - [0, 
15], measure="volume", long_name="volume" - ) - area = iris.coords.CellMeasure( - [1.5], standard_name="height", units="m" - ) - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="standard" - ) + lat = iris.coords.DimCoord([0, 30], standard_name="latitude", units="degrees") + volume = iris.coords.CellMeasure([0, 15], measure="volume", long_name="volume") + area = iris.coords.CellMeasure([1.5], standard_name="height", units="m") + t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00", calendar="standard") time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") @@ -177,16 +163,10 @@ class Test_cubes_with_ancillary_variables(tests.IrisTest): def create_cube(self): data = np.arange(4).reshape(2, 2) - lat = iris.coords.DimCoord( - [0, 30], standard_name="latitude", units="degrees" - ) + lat = iris.coords.DimCoord([0, 30], standard_name="latitude", units="degrees") quality = iris.coords.AncillaryVariable([0, 15], long_name="quality") - height = iris.coords.AncillaryVariable( - [1.5], standard_name="height", units="m" - ) - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="standard" - ) + height = iris.coords.AncillaryVariable([1.5], standard_name="height", units="m") + t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00", calendar="standard") time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") @@ -223,9 +203,7 @@ def create_cube(self): # DimCoords sigma = iris.coords.DimCoord([0.0, 10.0], var_name="sigma", units="1") - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="standard" - ) + t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00", calendar="standard") time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) # AtmosphereSigmaFactory (does not span concatenated dim) @@ -238,9 +216,7 @@ def 
create_cube(self): # HybridHeightFactory (span concatenated dim) delta = iris.coords.AuxCoord(10.0, var_name="delta", units="m") orog = iris.coords.AuxCoord(data, var_name="orog", units="m") - aux_factories.append( - iris.aux_factory.HybridHeightFactory(delta, sigma, orog) - ) + aux_factories.append(iris.aux_factory.HybridHeightFactory(delta, sigma, orog)) dim_coords_and_dims = [(time, 0), (sigma, 1)] aux_coords_and_dims = [ @@ -351,17 +327,11 @@ def test_ignore_diff_air_pressure(self): class Test_anonymous_dims(tests.IrisTest): def setUp(self): data = np.arange(12).reshape(2, 3, 2) - self.cube = iris.cube.Cube( - data, standard_name="air_temperature", units="K" - ) + self.cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") # Time coord - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="standard" - ) - t_coord = iris.coords.DimCoord( - [0, 6], standard_name="time", units=t_unit - ) + t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00", calendar="standard") + t_coord = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) self.cube.add_dim_coord(t_coord, 0) # Lats and lons diff --git a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py index 4ae48fe6f9..2a70ac2d32 100644 --- a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py +++ b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py @@ -51,9 +51,7 @@ def test_nearest(self): def test_nearest_sinusoidal(self): crs = ccrs.Sinusoidal() - res = self.src.regrid( - self.global_grid, ProjectedUnstructuredNearest(crs) - ) + res = self.src.regrid(self.global_grid, ProjectedUnstructuredNearest(crs)) self.assertArrayShapeStats( res, (1, 6, 73, 96), 315.891358296, 11.000639227, rtol=1e-8 ) @@ -61,14 +59,10 @@ def test_nearest_sinusoidal(self): res[:, 0], (1, 73, 96), 299.99993826, 3.9223839688e-5 ) - @unittest.skip( - 
"Deprecated API and provenance of reference numbers unknown." - ) + @unittest.skip("Deprecated API and provenance of reference numbers unknown.") def test_nearest_gnomonic_uk_domain(self): crs = ccrs.Gnomonic(central_latitude=60.0) - uk_grid = self.global_grid.intersection( - longitude=(-20, 20), latitude=(40, 80) - ) + uk_grid = self.global_grid.intersection(longitude=(-20, 20), latitude=(40, 80)) res = self.src.regrid(uk_grid, ProjectedUnstructuredNearest(crs)) self.assertArrayShapeStats( @@ -91,9 +85,7 @@ def test_nearest_gnomonic_uk_domain(self): [318.92881733, 318.92881733, 318.92881733], ] ) - self.assertArrayAlmostEqual( - expected_subset, res.data[0, 3, 5:8, 4:7].data - ) + self.assertArrayAlmostEqual(expected_subset, res.data[0, 3, 5:8, 4:7].data) def test_nearest_aux_factories(self): src = self.src @@ -143,9 +135,7 @@ def test_linear_sinusoidal(self): self.assertArrayShapeStats( res, (1, 6, 73, 96), 315.8914839, 11.0006338412, rtol=1e-8 ) - self.assertArrayShapeStats( - res[:, 0], (1, 73, 96), 299.99993826, 3.775024069e-5 - ) + self.assertArrayShapeStats(res[:, 0], (1, 73, 96), 299.99993826, 3.775024069e-5) expected_subset = np.array( [ [299.999987, 299.999996, 299.999999], @@ -153,9 +143,7 @@ def test_linear_sinusoidal(self): [299.999973, 299.999977, 299.999982], ] ) - self.assertArrayAlmostEqual( - expected_subset, res.data[0, 0, 20:23, 40:43].data - ) + self.assertArrayAlmostEqual(expected_subset, res.data[0, 0, 20:23, 40:43].data) if __name__ == "__main__": diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index 1bd39695ec..6f76ab14de 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -19,11 +19,7 @@ from iris import Constraint, load from iris.exceptions import IrisCfWarning -from iris.experimental.ugrid.load import ( - PARSE_UGRID_ON_LOAD, - load_mesh, - load_meshes, -) +from 
iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD, load_mesh, load_meshes from iris.experimental.ugrid.mesh import Mesh from iris.tests.stock.netcdf import ( _file_from_cdl_template as create_file_from_cdl_template, @@ -59,9 +55,7 @@ def ugrid_load(uris, constraints=None, callback=None): class TestBasic(tests.IrisTest): def common_test(self, load_filename, assert_filename): cube_list = ugrid_load( - tests.get_data_path( - ["NetCDF", "unstructured_grid", load_filename] - ), + tests.get_data_path(["NetCDF", "unstructured_grid", load_filename]), ) self.assertEqual(1, len(cube_list)) cube = cube_list[0] @@ -133,9 +127,7 @@ def test_multiple_phenomena(self): ["NetCDF", "unstructured_grid", "lfric_surface_mean.nc"] ), ) - self.assertCML( - cube_list, ("experimental", "ugrid", "surface_mean.cml") - ) + self.assertCML(cube_list, ("experimental", "ugrid", "surface_mean.cml")) class TestTolerantLoading(XIOSFileMixin): @@ -154,9 +146,7 @@ def tearDownClass(cls): def create_synthetic_file(self, **create_kwargs): template_name = create_kwargs["template"] # required kwarg testfile_name = "tmp_netcdf" - template_subs = dict( - NUM_NODES=7, NUM_FACES=3, DATASET_NAME=testfile_name - ) + template_subs = dict(NUM_NODES=7, NUM_FACES=3, DATASET_NAME=testfile_name) kwarg_subs = create_kwargs.get("subs", {}) # optional kwarg template_subs.update(kwarg_subs) filepath = create_file_from_cdl_template( diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py b/lib/iris/tests/integration/experimental/test_ugrid_save.py index 710ed6941d..02c4f3f852 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_save.py @@ -46,9 +46,7 @@ def test_example_result_cdls(self): for ex_name, cdl_path in self.example_names_paths.items(): # Create a test netcdf file. 
target_ncfile_path = str(self.temp_dir / f"{ex_name}.nc") - ncgen_from_cdl( - cdl_str=None, cdl_path=cdl_path, nc_path=target_ncfile_path - ) + ncgen_from_cdl(cdl_str=None, cdl_path=cdl_path, nc_path=target_ncfile_path) # Fill in blank data-variables. _add_standard_data(target_ncfile_path) # Load as Iris data @@ -58,9 +56,7 @@ def test_example_result_cdls(self): resave_ncfile_path = str(self.temp_dir / f"{ex_name}_resaved.nc") iris.save(cubes, resave_ncfile_path) # Check the output against a CDL snapshot. - refdir_relpath = ( - "integration/experimental/ugrid_save/TestBasicSave/" - ) + refdir_relpath = "integration/experimental/ugrid_save/TestBasicSave/" reffile_name = str(Path(cdl_path).name).replace(".nc", ".cdl") reffile_path = refdir_relpath + reffile_name self.assertCDL(resave_ncfile_path, reference_filename=reffile_path) @@ -71,9 +67,7 @@ def test_example_roundtrips(self): for ex_name, cdl_path in self.example_names_paths.items(): # Create a test netcdf file. target_ncfile_path = str(self.temp_dir / f"{ex_name}.nc") - ncgen_from_cdl( - cdl_str=None, cdl_path=cdl_path, nc_path=target_ncfile_path - ) + ncgen_from_cdl(cdl_str=None, cdl_path=cdl_path, nc_path=target_ncfile_path) # Fill in blank data-variables. 
_add_standard_data(target_ncfile_path) # Load the original as Iris data @@ -104,9 +98,7 @@ def test_example_roundtrips(self): self.assertEqual(orig.location, reloaded.location) orig_mesh = orig.mesh reloaded_mesh = reloaded.mesh - self.assertEqual( - orig_mesh.all_coords, reloaded_mesh.all_coords - ) + self.assertEqual(orig_mesh.all_coords, reloaded_mesh.all_coords) self.assertEqual( orig_mesh.all_connectivities, reloaded_mesh.all_connectivities, diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index a37f1eef07..41893ac948 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -227,8 +227,7 @@ def arg_coords(arg, name, unit, vals=None): vals = np.arange(n_flds + 2) # Note allowance vals = arg_vals(arg, vals) coords = [ - None if val is None else DimCoord([val], units=unit) - for val in vals + None if val is None else DimCoord([val], units=unit) for val in vals ] # Apply names separately, as 'pressure' is not a standard name. for coord in coords: @@ -321,9 +320,7 @@ def test_stash_constraint(self): file = self.save_fieldcubes(flds) airtemp_flds = [fld for fld in flds if fld.name() == "air_temperature"] stash_attribute = airtemp_flds[0].attributes["STASH"] - results = iris.load( - file, iris.AttributeConstraint(STASH=stash_attribute) - ) + results = iris.load(file, iris.AttributeConstraint(STASH=stash_attribute)) expected = CubeList(airtemp_flds).merge() self.assertEqual(results, expected) @@ -529,19 +526,13 @@ def test_FAIL_scalar_vector_concatenate(self): # We'd really like to fix this one... 
(single_timepoint_fld,) = self.fields(c_t="1") multi_timepoint_flds = self.fields(c_t="23") - file_single = self.save_fieldcubes( - [single_timepoint_fld], basename="single" - ) - file_multi = self.save_fieldcubes( - multi_timepoint_flds, basename="multi" - ) + file_single = self.save_fieldcubes([single_timepoint_fld], basename="single") + file_multi = self.save_fieldcubes(multi_timepoint_flds, basename="multi") results = iris.load((file_single, file_multi)) if not self.do_fast_loads: # This is what we'd LIKE to get (what iris.load gives). - expected = CubeList( - multi_timepoint_flds + [single_timepoint_fld] - ).merge() + expected = CubeList(multi_timepoint_flds + [single_timepoint_fld]).merge() else: # This is what we ACTUALLY get at present. # It can't combine the scalar and vector time coords. @@ -576,9 +567,7 @@ def test_FAIL_phenomena_nostash(self): # It's a bit tricky to arrange the existing data like that. # Do it by hacking the time values to allow merge, and then fixing # up the time - old_t1, old_t2 = ( - fld.coord("time").points[0] for fld in (flds[0], flds[2]) - ) + old_t1, old_t2 = (fld.coord("time").points[0] for fld in (flds[0], flds[2])) for i_fld, fld in enumerate(flds): # Hack the phenomena to all look like the first one. fld.rename("air_temperature") @@ -657,17 +646,13 @@ class TestCallDetails__Fast(Mixin_FieldTest, MixinCallDetails, tests.IrisTest): do_fast_loads = True -class TestDimsAndOrdering__Iris( - Mixin_FieldTest, MixinDimsAndOrdering, tests.IrisTest -): +class TestDimsAndOrdering__Iris(Mixin_FieldTest, MixinDimsAndOrdering, tests.IrisTest): # Finally, an actual test-class (unittest.TestCase) : # run the 'dimensions and ordering' tests with *normal* loading. 
do_fast_loads = False -class TestDimsAndOrdering__Fast( - Mixin_FieldTest, MixinDimsAndOrdering, tests.IrisTest -): +class TestDimsAndOrdering__Fast(Mixin_FieldTest, MixinDimsAndOrdering, tests.IrisTest): # Finally, an actual test-class (unittest.TestCase) : # run the 'dimensions and ordering' tests with *FAST* loading. do_fast_loads = True diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py index 70891bc40c..f711e68820 100644 --- a/lib/iris/tests/integration/netcdf/test__dask_locks.py +++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py @@ -108,7 +108,4 @@ def test_get_worker_lock(dask_scheduler): assert result.name == test_identity else: # low-level object doesn't have a readily available class for isinstance - assert all( - hasattr(result, att) - for att in ("acquire", "release", "locked") - ) + assert all(hasattr(result, att) for att in ("acquire", "release", "locked")) diff --git a/lib/iris/tests/integration/netcdf/test_attributes.py b/lib/iris/tests/integration/netcdf/test_attributes.py index aab91bcb31..6cc7ae3052 100644 --- a/lib/iris/tests/integration/netcdf/test_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_attributes.py @@ -98,9 +98,7 @@ def test_patching_conventions_attribute(self): self.assertEqual( res.attributes["Conventions"], - "{}, {}, {}".format( - CF_CONVENTIONS_VERSION, "convention1", "convention2" - ), + "{}, {}, {}".format(CF_CONVENTIONS_VERSION, "convention1", "convention2"), ) diff --git a/lib/iris/tests/integration/netcdf/test_aux_factories.py b/lib/iris/tests/integration/netcdf/test_aux_factories.py index 6b3dde6fd1..a0c2ec5992 100644 --- a/lib/iris/tests/integration/netcdf/test_aux_factories.py +++ b/lib/iris/tests/integration/netcdf/test_aux_factories.py @@ -81,9 +81,7 @@ def test_save_load_loop(self): iris.save(self.cube, filename) cube = iris.load_cube(filename, "air_potential_temperature") iris.save(cube, other_filename) - other_cube = 
iris.load_cube( - other_filename, "air_potential_temperature" - ) + other_cube = iris.load_cube(other_filename, "air_potential_temperature") self.assertEqual(cube, other_cube) @@ -92,17 +90,13 @@ class TestSaveMultipleAuxFactories(tests.IrisTest): def test_hybrid_height_and_pressure(self): cube = stock.realistic_4d() cube.add_aux_coord( - iris.coords.DimCoord( - 1200.0, long_name="level_pressure", units="hPa" - ) + iris.coords.DimCoord(1200.0, long_name="level_pressure", units="hPa") ) cube.add_aux_coord( iris.coords.DimCoord(0.5, long_name="other sigma", units="1") ) cube.add_aux_coord( - iris.coords.DimCoord( - 1000.0, long_name="surface_air_pressure", units="hPa" - ) + iris.coords.DimCoord(1000.0, long_name="surface_air_pressure", units="hPa") ) factory = iris.aux_factory.HybridPressureFactory( cube.coord("level_pressure"), @@ -123,9 +117,7 @@ def test_shared_primary(self): ) factory.rename("another altitude") cube.add_aux_factory(factory) - with self.temp_filename( - suffix=".nc" - ) as filename, self.assertRaisesRegex( + with self.temp_filename(suffix=".nc") as filename, self.assertRaisesRegex( ValueError, "multiple aux factories" ): iris.save(cube, filename) diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index 177e9ce325..c294891cb6 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -112,9 +112,7 @@ def fix_array(array): fix_array(np.zeros(ancil_shape)), long_name="sample_ancil" ) cube.add_ancillary_variable(ancil, ancil_dims) - cm = CellMeasure( - fix_array(np.zeros(cm_shape)), long_name="sample_cm" - ) + cm = CellMeasure(fix_array(np.zeros(cm_shape)), long_name="sample_cm") cube.add_cell_measure(cm, cm_dims) return cube @@ -133,9 +131,7 @@ def test_realfile_loadsave_equivalence(self, save_is_delayed, output_path): cube.attributes["Conventions"] = "CF-1.7" original_cubes = sorted(original_cubes, 
key=lambda cube: cube.name()) - result = iris.save( - original_cubes, output_path, compute=not save_is_delayed - ) + result = iris.save(original_cubes, output_path, compute=not save_is_delayed) if save_is_delayed: # In this case, must also "complete" the save. result.compute() @@ -172,9 +168,7 @@ def scheduler_type(cls, request): if config_name == "distributed": _distributed_client.close() - def test_scheduler_types( - self, output_path, scheduler_type, save_is_delayed - ): + def test_scheduler_types(self, output_path, scheduler_type, save_is_delayed): # Check operation works and behaves the same with different schedulers, # especially including distributed. @@ -205,9 +199,7 @@ def test_scheduler_types( with warnings.catch_warnings(record=True) as logged_warnings: # The compute *returns* warnings from the delayed operations. issued_warnings = result.compute() - issued_warnings = [ - log.message for log in logged_warnings - ] + issued_warnings + issued_warnings = [log.message for log in logged_warnings] + issued_warnings warning_messages = [warning.args[0] for warning in issued_warnings] if scheduler_type == "DistributedScheduler": @@ -229,9 +221,7 @@ def test_scheduler_types( expected_msg = "contains unmasked data points equal to the fill-value" assert all(expected_msg in message for message in warning_messages) - def test_time_of_writing( - self, save_is_delayed, output_path, scheduler_type - ): + def test_time_of_writing(self, save_is_delayed, output_path, scheduler_type): # Check when lazy data is *actually* written : # - in 'immediate' mode, on initial file write # - in 'delayed' mode, only when the delayed-write is computed. @@ -249,9 +239,7 @@ def test_time_of_writing( assert save_is_delayed == (result is not None) # Read back : NOTE avoid loading the separate surface-altitude cube. 
- readback_cube = iris.load_cube( - output_path, "air_potential_temperature" - ) + readback_cube = iris.load_cube(output_path, "air_potential_temperature") # Check the components to be tested *are* lazy. See: self.all_vars_lazy(). assert readback_cube.has_lazy_data() assert readback_cube.coord("surface_altitude").has_lazy_points() @@ -260,9 +248,7 @@ def test_time_of_writing( # If 'delayed', the lazy content should all be masked, otherwise none of it. def getmask(cube_or_coord): - cube_or_coord = ( - cube_or_coord.copy() - ) # avoid realising the original + cube_or_coord = cube_or_coord.copy() # avoid realising the original if hasattr(cube_or_coord, "points"): data = cube_or_coord.points else: @@ -315,9 +301,7 @@ def test_fill_warnings(self, warning_type, output_path, save_is_delayed): if warning_type == "WarnFillvalueCollision": make_fv_collide = True make_maskedbytes = False - expected_msg = ( - "contains unmasked data points equal to the fill-value" - ) + expected_msg = "contains unmasked data points equal to the fill-value" else: assert warning_type == "WarnMaskedBytes" make_fv_collide = False @@ -344,16 +328,12 @@ def test_fill_warnings(self, warning_type, output_path, save_is_delayed): # Complete the operation now with warnings.catch_warnings(): # NOTE: warnings should *not* be issued here, instead they are returned. - warnings.simplefilter( - "error", category=IrisSaverFillValueWarning - ) + warnings.simplefilter("error", category=IrisSaverFillValueWarning) result_warnings = result.compute() # Either way, we should now have 2 similar warnings. 
assert len(result_warnings) == 2 - assert all( - expected_msg in warning.args[0] for warning in result_warnings - ) + assert all(expected_msg in warning.args[0] for warning in result_warnings) def test_no_delayed_writes(self, output_path): # Just check that a delayed save returns a usable 'delayed' object, even when diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index 0fc619e4cb..673b988557 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -42,9 +42,7 @@ def test_lazy_preserved_save(self): ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc") ) # While loading, "turn off" loading small variables as real data. - with mock.patch( - "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 - ): + with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): acube = iris.load_cube(fpath, "air_temperature") self.assertTrue(acube.has_lazy_data()) # Also check a coord with lazy points + bounds. @@ -140,9 +138,7 @@ def test_unknown_method(self): warning_messages = [ warn for warn in warning_messages - if isinstance( - warn, iris.exceptions.IrisUnknownCellMethodWarning - ) + if isinstance(warn, iris.exceptions.IrisUnknownCellMethodWarning) ] self.assertEqual(len(warning_messages), 1) message = warning_messages[0].args[0] @@ -207,9 +203,7 @@ def _single_test(self, datatype, CDLfilename, manual=False): decimal = int(-np.log10(scale_factor)) packedcube = iris.load_cube(file_out) # Check that packed cube is accurate to expected precision - self.assertArrayAlmostEqual( - cube.data, packedcube.data, decimal=decimal - ) + self.assertArrayAlmostEqual(cube.data, packedcube.data, decimal=decimal) # Check the netCDF file against CDL expected output. 
self.assertCDL( file_out, @@ -361,9 +355,7 @@ def create_nc_file(self, tmp_path): def test_lat_not_loaded(self): # iris#5068 includes discussion of possible retention of the skipped # coords in the future. - with pytest.warns( - match="Missing data dimensions for multi-valued DimCoord" - ): + with pytest.warns(match="Missing data dimensions for multi-valued DimCoord"): cube = iris.load_cube(self.nc_path) with pytest.raises(iris.exceptions.CoordinateNotFoundError): _ = cube.coord("lat") @@ -434,9 +426,7 @@ def test_basic_save(self): nc_dataset = nc.Dataset(filepath_indirect, "w") # NOTE: we **must** use delayed saving here, as we cannot do direct saving to # a user-owned dataset. - result = iris.save( - self.testdata, nc_dataset, saver="nc", compute=False - ) + result = iris.save(self.testdata, nc_dataset, saver="nc", compute=False) # Do some very basic sanity checks on the resulting Dataset. # It should still be open (!) diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py index 4e5da18bbd..7f52f722ae 100644 --- a/lib/iris/tests/integration/netcdf/test_self_referencing.py +++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py @@ -68,9 +68,7 @@ def setUp(self): latitudes = dataset.createVariable("lat", np.float64, ("lat",)) longitudes = dataset.createVariable("lon", np.float64, ("lon",)) levels = dataset.createVariable("lev", np.float64, ("lev",)) - volcello = dataset.createVariable( - "volcello", np.float32, ("lat", "lon", "lev") - ) + volcello = dataset.createVariable("volcello", np.float32, ("lat", "lon", "lev")) latitudes.standard_name = "latitude" latitudes.units = "degrees_north" @@ -115,9 +113,7 @@ def test_self_referencing_load_issue_3367(self): with mock.patch("warnings.warn") as warn: # ensure file loads without failure cube = iris.load_cube(self.temp_dir_path) - warn.assert_called_with( - expected_msg, category=IrisCfMissingVarWarning - ) + 
warn.assert_called_with(expected_msg, category=IrisCfMissingVarWarning) # extra check to ensure correct variable was found assert cube.standard_name == "ocean_volume" diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index c742564c7d..82b406abbf 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -21,14 +21,7 @@ if tests.MPL_AVAILABLE: import matplotlib.pyplot as plt - from iris.plot import ( - contour, - contourf, - pcolor, - pcolormesh, - points, - scatter, - ) + from iris.plot import contour, contourf, pcolor, pcolormesh, points, scatter @tests.skip_plot @@ -87,19 +80,13 @@ def test_points_with_c_kwarg_specified_mappable(self): self.assertIs(cbar.mappable, mappable_initial) def test_scatter_with_c_kwarg(self): - mappable = scatter( - self.traj_lon, self.traj_lat, c=self.traj_lon.points - ) + mappable = scatter(self.traj_lon, self.traj_lat, c=self.traj_lon.points) cbar = plt.colorbar() self.assertIs(cbar.mappable, mappable) def test_scatter_with_c_kwarg_specified_mappable(self): - mappable_initial = scatter( - self.traj_lon, self.traj_lat, c=self.traj_lon.points - ) - _ = scatter( - self.traj_lon, self.traj_lat, c=self.traj_lon.points, cmap="cool" - ) + mappable_initial = scatter(self.traj_lon, self.traj_lat, c=self.traj_lon.points) + _ = scatter(self.traj_lon, self.traj_lat, c=self.traj_lon.points, cmap="cool") cbar = plt.colorbar(mappable_initial) self.assertIs(cbar.mappable, mappable_initial) diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py index 2c9360e9ea..b6d12d805a 100644 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ b/lib/iris/tests/integration/plot/test_nzdateline.py @@ -31,8 +31,7 @@ def test_dateline(self): # This is set in longitudes with the datum set to the # International Date Line. 
self.assertTrue( - -10 < plt.gca().get_xlim()[0] < -5 - and 5 < plt.gca().get_xlim()[1] < 10 + -10 < plt.gca().get_xlim()[0] < -5 and 5 < plt.gca().get_xlim()[1] < 10 ) diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 652a205fd8..4c30753dee 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -118,9 +118,7 @@ def test_2d_plain_latlon(self): # Test 2d vector plotting with implicit (PlateCarree) coord system. u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons()) ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=180)) - self.plot( - "latlon_2d", u_cube, v_cube, coords=("longitude", "latitude") - ) + self.plot("latlon_2d", u_cube, v_cube, coords=("longitude", "latitude")) ax.coastlines(resolution="110m", color="red") ax.set_global() self.check_graphic() @@ -129,9 +127,7 @@ def test_2d_plain_latlon_on_polar_map(self): # Test 2d vector plotting onto a different projection. u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons()) ax = plt.axes(projection=ccrs.NorthPolarStereo()) - self.plot( - "latlon_2d_polar", u_cube, v_cube, coords=("longitude", "latitude") - ) + self.plot("latlon_2d_polar", u_cube, v_cube, coords=("longitude", "latitude")) ax.coastlines(resolution="110m", color="red") self.check_graphic() @@ -139,9 +135,7 @@ def test_2d_rotated_latlon(self): # Test plotting vectors in a rotated latlon coord system. 
u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons(rotated=True)) ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=180)) - self.plot( - "2d_rotated", u_cube, v_cube, coords=("longitude", "latitude") - ) + self.plot("2d_rotated", u_cube, v_cube, coords=("longitude", "latitude")) ax.coastlines(resolution="110m", color="red") ax.set_global() self.check_graphic() @@ -158,16 +152,12 @@ def test_fail_unsupported_coord_system(self): r"This .* translates as Cartopy \+proj=merc .*" ) with self.assertRaisesRegex(ValueError, re_msg): - self.plot( - "2d_rotated", u_cube, v_cube, coords=("longitude", "latitude") - ) + self.plot("2d_rotated", u_cube, v_cube, coords=("longitude", "latitude")) def test_circular_longitude(self): # Test circular longitude does not cause a crash. res = 5 - lat = DimCoord( - np.arange(-90, 91, res), "latitude", units="degrees_north" - ) + lat = DimCoord(np.arange(-90, 91, res), "latitude", units="degrees_north") lon = DimCoord( np.arange(0, 360, res), "longitude", diff --git a/lib/iris/tests/integration/test_cube.py b/lib/iris/tests/integration/test_cube.py index 8f3ac5fb48..d0267e4263 100644 --- a/lib/iris/tests/integration/test_cube.py +++ b/lib/iris/tests/integration/test_cube.py @@ -25,12 +25,8 @@ def test_agg_by_aux_coord(self): ("NetCDF", "testing", "small_theta_colpex.nc") ) # While loading, "turn off" loading small variables as real data. - with mock.patch( - "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 - ): - cube = iris.load_cube( - problem_test_file, "air_potential_temperature" - ) + with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): + cube = iris.load_cube(problem_test_file, "air_potential_temperature") # Test aggregating by aux coord, notably the `forecast_period` aux # coord on `cube`, whose `_points` attribute is a lazy array. 
diff --git a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py index b09b408827..ba14327c9b 100644 --- a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py +++ b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py @@ -142,9 +142,7 @@ def check_captured_warnings( if allow_possible_legacy_warning: # Remove any unused "legacy attribute saving" key. # N.B. this is the *only* key we will tolerate not being used. - expected_keys = [ - key for key in expected_keys if key != legacy_message_key - ] + expected_keys = [key for key in expected_keys if key != legacy_message_key] assert set(found_results) == set(expected_keys) @@ -236,9 +234,7 @@ def create_testcase_files_or_cubes( filepath1 = self._testfile_path("testfile") filepath2 = self._testfile_path("testfile2") - def make_file( - filepath: str, global_value=None, var_values=None - ) -> str: + def make_file(filepath: str, global_value=None, var_values=None) -> str: ds = threadsafe_nc4.DatasetWrapper(filepath, "w") if global_value is not None: ds.setncattr(attr_name, global_value) @@ -281,13 +277,9 @@ def make_cubes(var_name, global_value=None, var_values=None): if cubes: results = make_cubes("v1", global_value_file1, var_values_file1) if global_value_file2 is not None or var_values_file2 is not None: - results.extend( - make_cubes("v2", global_value_file2, var_values_file2) - ) + results.extend(make_cubes("v2", global_value_file2, var_values_file2)) else: - results = [ - make_file(filepath1, global_value_file1, var_values_file1) - ] + results = [make_file(filepath1, global_value_file1, var_values_file1)] if global_value_file2 is not None or var_values_file2 is not None: # Make a second testfile and add it to files-to-be-loaded. 
results.append( @@ -395,9 +387,7 @@ def fetch_results( try: ds = threadsafe_nc4.DatasetWrapper(filepath) global_result = ( - ds.getncattr(attr_name) - if attr_name in ds.ncattrs() - else None + ds.getncattr(attr_name) if attr_name in ds.ncattrs() else None ) # Fetch local attr value from all data variables : In our testcases, # that is all *except* dimcoords (ones named after dimensions). @@ -440,16 +430,14 @@ def fetch_results( # Return a result-set for each occurring global value (possibly # including a 'None'). global_values = set( - cube.attributes.globals.get(attr_name, None) - for cube in cubes + cube.attributes.globals.get(attr_name, None) for cube in cubes ) results = [ [globalval] + [ cube.attributes.locals.get(attr_name, None) for cube in cubes - if cube.attributes.globals.get(attr_name, None) - == globalval + if cube.attributes.globals.get(attr_name, None) == globalval ] for globalval in sorted(global_values, key=str) ] @@ -524,9 +512,7 @@ def fetch_results( "STASH", "um_stash_source", ] -_MATRIX_ATTRNAMES = [ - attr for attr in _MATRIX_ATTRNAMES if attr not in _SPECIAL_ATTRS -] +_MATRIX_ATTRNAMES = [attr for attr in _MATRIX_ATTRNAMES if attr not in _SPECIAL_ATTRS] # @@ -584,8 +570,7 @@ def encode_matrix_result(results: List[List[str]]) -> List[str]: if not isinstance(results[0], list): results = [results] assert all( - all(val is None or isinstance(val, str) for val in vals) - for vals in results + all(val is None or isinstance(val, str) for val in vals) for vals in results ) # Translate "None" values to "-" @@ -609,15 +594,11 @@ def valrep(val): @pytest.fixture(autouse=True, scope="session") def matrix_results(): matrix_filepaths = { - testtype: ( - Path(__file__).parent / f"attrs_matrix_results_{testtype}.json" - ) + testtype: (Path(__file__).parent / f"attrs_matrix_results_{testtype}.json") for testtype in _MATRIX_TESTTYPES } # An environment variable can trigger saving of the results. 
- save_matrix_results = bool( - int(os.environ.get("SAVEALL_MATRIX_RESULTS", "0")) - ) + save_matrix_results = bool(int(os.environ.get("SAVEALL_MATRIX_RESULTS", "0"))) matrix_results = {} for testtype in _MATRIX_TESTTYPES: @@ -692,9 +673,7 @@ class TestRoundtrip(MixinAttrsTesting): """ # Parametrise all tests over split/unsplit saving. - @pytest.fixture( - params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True - ) + @pytest.fixture(params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True) def do_split(self, request): do_split = request.param self.save_split_attrs = do_split @@ -721,9 +700,7 @@ def run_roundtrip_testcase(self, attr_name, values): cubes = sorted(cubes, key=lambda cube: cube.name()) do_split = getattr(self, "save_split_attrs", False) kwargs = ( - dict(save_split_attrs=do_split) - if _SPLIT_SAVE_SUPPORTED - else dict() + dict(save_split_attrs=do_split) if _SPLIT_SAVE_SUPPORTED else dict() ) with iris.FUTURE.context(**kwargs): iris.save(cubes, self.result_filepath) @@ -760,9 +737,7 @@ def check_roundtrip_results(self, expected, expected_warnings=None): # def test_01_userstyle_single_global(self): - self.run_roundtrip_testcase( - attr_name="myname", values=["single-value", None] - ) + self.run_roundtrip_testcase(attr_name="myname", values=["single-value", None]) # Default behaviour for a general global user-attribute. # It simply remains global. self.check_roundtrip_results(["single-value", None]) @@ -1041,9 +1016,7 @@ def test_16_localstyle(self, local_attr, origin_style, do_split): attrval = "p r o c e s s" expect_var = attrval - if local_attr == "STASH" and ( - origin_style == "input_local" or not do_split - ): + if local_attr == "STASH" and (origin_style == "input_local" or not do_split): # A special case, output translates this to a different attribute name. 
self.attrname = "um_stash_source" @@ -1055,9 +1028,7 @@ def test_16_localstyle(self, local_attr, origin_style, do_split): @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) - def test_roundtrip_matrix( - self, testcase, attrname, matrix_results, do_split - ): + def test_roundtrip_matrix(self, testcase, attrname, matrix_results, do_split): do_saves, matrix_results = matrix_results split_param = "split" if do_split else "unsplit" testcase_spec = matrix_results["roundtrip"][testcase] @@ -1117,9 +1088,7 @@ def check_load_results(self, expected, oldstyle_combined=False): # def test_01_userstyle_single_global(self): - self.run_load_testcase( - attr_name="myname", values=["single_value", None, None] - ) + self.run_load_testcase(attr_name="myname", values=["single_value", None, None]) # Legacy-equivalent result check (single attributes dict per cube) self.check_load_results( [None, "single_value", "single_value"], @@ -1135,9 +1104,7 @@ def test_02_userstyle_single_local(self): attr_name="myname", # A generic "user" attribute with no special handling values=[None, "single-value", None], ) - self.check_load_results( - [None, "single-value", None], oldstyle_combined=True - ) + self.check_load_results([None, "single-value", None], oldstyle_combined=True) self.check_load_results([None, "single-value", None]) def test_03_userstyle_multiple_different(self): @@ -1199,9 +1166,7 @@ def test_08_conventions_var_both(self): values=["global-setting", "local-setting"], ) # (#1): legacy result : the global version gets lost. - self.check_load_results( - [None, "local-setting"], oldstyle_combined=True - ) + self.check_load_results([None, "local-setting"], oldstyle_combined=True) # (#2): newstyle results : retain both. 
self.check_load_results(["global-setting", "local-setting"]) @@ -1212,9 +1177,7 @@ def test_08_conventions_var_both(self): def test_09_globalstyle__global(self, global_attr): attr_content = f"Global tracked {global_attr}" - self.run_load_testcase( - attr_name=global_attr, values=[attr_content, None] - ) + self.run_load_testcase(attr_name=global_attr, values=[attr_content, None]) # (#1) legacy self.check_load_results([None, attr_content], oldstyle_combined=True) # (#2) newstyle : global status preserved. @@ -1335,9 +1298,7 @@ def test_16_localstyle(self, local_attr, origin_style): @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) @pytest.mark.parametrize("resultstyle", _MATRIX_LOAD_RESULTSTYLES) - def test_load_matrix( - self, testcase, attrname, matrix_results, resultstyle - ): + def test_load_matrix(self, testcase, attrname, matrix_results, resultstyle): do_saves, matrix_results = matrix_results testcase_spec = matrix_results["load"][testcase] input_spec = testcase_spec["input"] @@ -1347,9 +1308,7 @@ def test_load_matrix( result_cubes = iris.load(self.input_filepaths) do_combined = resultstyle == "legacy" - results = self.fetch_results( - cubes=result_cubes, oldstyle_combined=do_combined - ) + results = self.fetch_results(cubes=result_cubes, oldstyle_combined=do_combined) result_spec = encode_matrix_result(results) attr_style = deduce_attr_style(attrname) @@ -1368,9 +1327,7 @@ class TestSave(MixinAttrsTesting): """ # Parametrise all tests over split/unsplit saving. 
- @pytest.fixture( - params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True - ) + @pytest.fixture(params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True) def do_split(self, request): do_split = request.param self.save_split_attrs = do_split @@ -1389,9 +1346,7 @@ def run_save_testcase(self, attr_name: str, values: list): self.result_filepath = self._testfile_path("result") do_split = getattr(self, "save_split_attrs", False) kwargs = ( - dict(save_split_attrs=do_split) - if _SPLIT_SAVE_SUPPORTED - else dict() + dict(save_split_attrs=do_split) if _SPLIT_SAVE_SUPPORTED else dict() ) with iris.FUTURE.context(**kwargs): iris.save(self.input_cubes, self.result_filepath) @@ -1411,9 +1366,7 @@ def run_save_testcase_legacytype(self, attr_name: str, values: list): self.run_save_testcase(attr_name, [None] + values) - def check_save_results( - self, expected: list, expected_warnings: List[str] = None - ): + def check_save_results(self, expected: list, expected_warnings: List[str] = None): results = self.fetch_results(filepath=self.result_filepath) assert results == expected check_captured_warnings( @@ -1463,16 +1416,12 @@ def test_Conventions__single(self): self.check_save_results(["CF-1.7", None]) def test_Conventions__multiple_same(self): - self.run_save_testcase_legacytype( - "Conventions", ["same-value", "same-value"] - ) + self.run_save_testcase_legacytype("Conventions", ["same-value", "same-value"]) # Always discarded + replaced by a single global setting. self.check_save_results(["CF-1.7", None, None]) def test_Conventions__multiple_different(self): - self.run_save_testcase_legacytype( - "Conventions", ["value-A", "value-B"] - ) + self.run_save_testcase_legacytype("Conventions", ["value-A", "value-B"]) # Always discarded + replaced by a single global setting. 
self.check_save_results(["CF-1.7", None, None]) @@ -1518,9 +1467,7 @@ def test_globalstyle__multiple_different(self, global_attr): def test_globalstyle__multiple_onemissing(self, global_attr): # Multiple global-type, with one missing, behave like different values. - self.run_save_testcase_legacytype( - global_attr, ["value", "value", None] - ) + self.run_save_testcase_legacytype(global_attr, ["value", "value", None]) # Stored as locals when there are differing values. msg_regexp = ( f"'{global_attr}' is being added as CF data variable attribute," @@ -1546,9 +1493,7 @@ def test_localstyle__single(self, local_attr): self.check_save_results(expected_results) def test_localstyle__multiple_same(self, local_attr): - self.run_save_testcase_legacytype( - local_attr, ["value-same", "value-same"] - ) + self.run_save_testcase_legacytype(local_attr, ["value-same", "value-same"]) # They remain separate + local expected_results = [None, "value-same", "value-same"] @@ -1597,9 +1542,7 @@ def test_globallocal_clashing(self, do_split): def test_globallocal_oneeach_same(self, do_split): # One cube with global attr, another with identical local one. - self.run_save_testcase( - "userattr", values=[[None, "value"], ["value", None]] - ) + self.run_save_testcase("userattr", values=[[None, "value"], ["value", None]]) if do_split: expected = [None, "value", "value"] expected_warning = ( @@ -1614,13 +1557,9 @@ def test_globallocal_oneeach_same(self, do_split): def test_globallocal_oneeach_different(self, do_split): # One cube with global attr, another with a *different* local one. - self.run_save_testcase( - "userattr", [[None, "valueA"], ["valueB", None]] - ) + self.run_save_testcase("userattr", [[None, "valueA"], ["valueB", None]]) if do_split: - warning = ( - r"Saving the cube global attributes \['userattr'\] as local" - ) + warning = r"Saving the cube global attributes \['userattr'\] as local" else: # N.B. legacy code does not warn of global-to-local "demotion". 
warning = None diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index bab925bd7e..e8dd367187 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -31,7 +31,7 @@ def _test_coord(self, cube, point, bounds=None, **kwargs): self.assertEqual( len(coords), 1, - "failed to find exactly one coord" " using: {}".format(kwargs), + "failed to find exactly one coord using: {}".format(kwargs), ) self.assertEqual(coords[0].points, point) if bounds is not None: @@ -102,9 +102,7 @@ def test_soil_depth_round_trip(self): cube = next(iris.fileformats.pp.load_cubes("DUMMY")) self.assertIn("soil", cube.standard_name) - self._test_coord( - cube, point, bounds=[lower, upper], standard_name="depth" - ) + self._test_coord(cube, point, bounds=[lower, upper], standard_name="depth") # Now use the save rules to convert the Cube back into a PPField. field = iris.fileformats.pp.PPField3() @@ -132,9 +130,7 @@ def test_potential_temperature_level_round_trip(self): with mock.patch("iris.fileformats.pp.load", new=load): cube = next(iris.fileformats.pp.load_cubes("DUMMY")) - self._test_coord( - cube, potm_value, standard_name="air_potential_temperature" - ) + self._test_coord(cube, potm_value, standard_name="air_potential_temperature") # Now use the save rules to convert the Cube back into a PPField. field = iris.fileformats.pp.PPField3() @@ -209,9 +205,7 @@ def test_hybrid_pressure_round_trip(self): self.assertEqual(pressure_cube.units, "Pa") # Check the data cube is set up to use hybrid-pressure. 
- self._test_coord( - data_cube, model_level, standard_name="model_level_number" - ) + self._test_coord(data_cube, model_level, standard_name="model_level_number") self._test_coord( data_cube, delta, @@ -285,9 +279,9 @@ def test_hybrid_pressure_with_duplicate_references(self): return_value=iter([data_field, pressure_field, pressure_field]) ) msg = "Multiple reference cubes for surface_air_pressure" - with mock.patch( - "iris.fileformats.pp.load", new=load - ) as load, mock.patch("warnings.warn") as warn: + with mock.patch("iris.fileformats.pp.load", new=load) as load, mock.patch( + "warnings.warn" + ) as warn: _, _, _ = iris.fileformats.pp.load_cubes("DUMMY") warn.assert_called_with(msg, category=IrisUserWarning) @@ -320,9 +314,7 @@ def test_hybrid_height_with_non_standard_coords(self): cube.add_aux_coord(sigma_coord) cube.add_aux_coord(surface_altitude_coord, (0, 1)) cube.add_aux_factory( - HybridHeightFactory( - delta_coord, sigma_coord, surface_altitude_coord - ) + HybridHeightFactory(delta_coord, sigma_coord, surface_altitude_coord) ) field = iris.fileformats.pp.PPField3() @@ -367,9 +359,7 @@ def test_hybrid_pressure_with_non_standard_coords(self): cube.add_aux_coord(sigma_coord) cube.add_aux_coord(surface_air_pressure_coord, (0, 1)) cube.add_aux_factory( - HybridPressureFactory( - delta_coord, sigma_coord, surface_air_pressure_coord - ) + HybridPressureFactory(delta_coord, sigma_coord, surface_air_pressure_coord) ) field = iris.fileformats.pp.PPField3() @@ -405,9 +395,9 @@ def test_hybrid_height_round_trip_no_reference(self): # Convert field to a cube. 
load = mock.Mock(return_value=iter([data_field])) - with mock.patch( - "iris.fileformats.pp.load", new=load - ) as load, mock.patch("warnings.warn") as warn: + with mock.patch("iris.fileformats.pp.load", new=load) as load, mock.patch( + "warnings.warn" + ) as warn: (data_cube,) = iris.fileformats.pp.load_cubes("DUMMY") msg = ( @@ -417,9 +407,7 @@ def test_hybrid_height_round_trip_no_reference(self): warn.assert_called_with(msg, category=IrisUserWarning) # Check the data cube is set up to use hybrid height. - self._test_coord( - data_cube, model_level, standard_name="model_level_number" - ) + self._test_coord(data_cube, model_level, standard_name="model_level_number") self._test_coord( data_cube, delta, @@ -480,9 +468,7 @@ def create_cube(self, fp_min, fp_mid, fp_max, ref_offset, season=None): ) ) if season: - cube.add_aux_coord( - AuxCoord(long_name="clim_season", points=season) - ) + cube.add_aux_coord(AuxCoord(long_name="clim_season", points=season)) cube.add_cell_method(CellMethod("DUMMY", "clim_season")) return cube @@ -623,9 +609,7 @@ def test_save_irregular(self): @tests.skip_data class TestLoadLittleendian(tests.IrisTest): def test_load_sample(self): - file_path = tests.get_data_path( - ("PP", "little_endian", "qrparm.orog.pp") - ) + file_path = tests.get_data_path(("PP", "little_endian", "qrparm.orog.pp")) # Ensure it just loads. 
cube = iris.load_cube(file_path, "surface_altitude") self.assertEqual(cube.shape, (110, 160)) @@ -647,9 +631,7 @@ def check_minmax(array, expect_min, expect_max): @tests.skip_data class TestAsCubes(tests.IrisTest): def setUp(self): - dpath = tests.get_data_path( - ["PP", "meanMaxMin", "200806081200__qwpb.T24.pp"] - ) + dpath = tests.get_data_path(["PP", "meanMaxMin", "200806081200__qwpb.T24.pp"]) self.ppfs = iris.fileformats.pp.load(dpath) def test_pseudo_level_filter(self): @@ -684,9 +666,7 @@ def create_cube(self, longitude_coord="longitude"): cube = Cube(np.zeros((2, 3, 4))) tunit = Unit("days since epoch", calendar="standard") tcoord = DimCoord(np.arange(2), standard_name="time", units=tunit) - xcoord = DimCoord( - np.arange(3), standard_name=longitude_coord, units="degrees" - ) + xcoord = DimCoord(np.arange(3), standard_name=longitude_coord, units="degrees") ycoord = DimCoord(points=np.arange(4)) cube.add_dim_coord(tcoord, 0) cube.add_dim_coord(xcoord, 1) @@ -712,9 +692,7 @@ def test_longitudinal_mean_only(self): def test_grid_longitudinal_mean_only(self): cube = self.create_cube(longitude_coord="grid_longitude") - cube.add_cell_method( - CellMethod(method="mean", coords="grid_longitude") - ) + cube.add_cell_method(CellMethod(method="mean", coords="grid_longitude")) field = self.convert_cube_to_field(cube) self.assertEqual(int(field.lbproc), 64) @@ -740,9 +718,7 @@ def callback_ignore_cube_exception(cube, field, filename): return callback_ignore_cube_exception def test_ignore_cube_callback(self): - test_dataset = tests.get_data_path( - ["PP", "globClim1", "dec_subset.pp"] - ) + test_dataset = tests.get_data_path(["PP", "globClim1", "dec_subset.pp"]) exception_callback = self.callback_wrapper() result_cubes = iris.load(test_dataset, callback=exception_callback) n_result_cubes = len(result_cubes) diff --git a/lib/iris/tests/integration/test_regrid_equivalence.py b/lib/iris/tests/integration/test_regrid_equivalence.py index 6bcb1ce403..331a12a8ac 100644 --- 
a/lib/iris/tests/integration/test_regrid_equivalence.py +++ b/lib/iris/tests/integration/test_regrid_equivalence.py @@ -126,9 +126,7 @@ def test_source_mask(self): src_cube.data[1, 1] = np.ma.masked _debug_data(src_cube, "masked SOURCE") dst_cube = grid_cube(dst_x, dst_y) - result_cube = self.regrid( - src_cube, dst_cube, translate_nans_to_mask=True - ) + result_cube = self.regrid(src_cube, dst_cube, translate_nans_to_mask=True) _debug_data(result_cube, "masked RESULT") self.assertMaskedArrayEqual(result_cube.data, expected_result) diff --git a/lib/iris/tests/integration/test_regridding.py b/lib/iris/tests/integration/test_regridding.py index 44e9fef22e..833c059053 100644 --- a/lib/iris/tests/integration/test_regridding.py +++ b/lib/iris/tests/integration/test_regridding.py @@ -145,9 +145,7 @@ def test_linear_same_crs_global(self): sx_coord = self.src.coord(axis="x") sy_coord = self.src.coord(axis="y") x_coord = sx_coord.copy(points, bounds=bounds) - grid = iris.cube.Cube( - np.zeros([sy_coord.points.size, x_coord.points.size]) - ) + grid = iris.cube.Cube(np.zeros([sy_coord.points.size, x_coord.points.size])) grid.add_dim_coord(sy_coord, 0) grid.add_dim_coord(x_coord, 1) @@ -183,9 +181,7 @@ def setUp(self): grid_x.coord_system = grid_crs grid_y = sy_coord.copy(np.linspace(-10, 10, 100)) grid_y.coord_system = grid_crs - grid = iris.cube.Cube( - np.zeros([grid_y.points.size, grid_x.points.size]) - ) + grid = iris.cube.Cube(np.zeros([grid_y.points.size, grid_x.points.size])) grid.add_dim_coord(grid_y, 0) grid.add_dim_coord(grid_x, 1) diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py index abe8fd0a2e..aa4ce67a3b 100644 --- a/lib/iris/tests/integration/test_trajectory.py +++ b/lib/iris/tests/integration/test_trajectory.py @@ -21,9 +21,7 @@ class TestColpex(tests.IrisTest): def setUp(self): # Load the COLPEX data => TZYX - path = tests.get_data_path( - ["PP", "COLPEX", "theta_and_orog_subset.pp"] - ) + path = 
tests.get_data_path(["PP", "COLPEX", "theta_and_orog_subset.pp"]) cube = iris.load_cube(path, "air_potential_temperature") cube.coord("grid_latitude").bounds = None cube.coord("grid_longitude").bounds = None @@ -39,12 +37,8 @@ def test_trajectory_extraction(self): [("grid_latitude", [-0.1188]), ("grid_longitude", [359.57958984])], ) expected = self.cube[..., 10, 0].data - self.assertArrayAllClose( - single_point[..., 0].data, expected, rtol=2.0e-7 - ) - self.assertCML( - single_point, ("trajectory", "single_point.cml"), checksum=False - ) + self.assertArrayAllClose(single_point[..., 0].data, expected, rtol=2.0e-7) + self.assertCML(single_point, ("trajectory", "single_point.cml"), checksum=False) def test_trajectory_extraction_calc(self): # Pull out another point and test against a manually calculated result. @@ -81,9 +75,7 @@ def test_trajectory_extraction_axis_aligned(self): trajectory = Trajectory(waypoints, sample_count=100) sample_points = self._traj_to_sample_points(trajectory) trajectory_cube = traj_interpolate(self.cube, sample_points) - self.assertCML( - trajectory_cube, ("trajectory", "constant_latitude.cml") - ) + self.assertCML(trajectory_cube, ("trajectory", "constant_latitude.cml")) def test_trajectory_extraction_zigzag(self): # Extract a zig-zag trajectory @@ -121,9 +113,7 @@ def test_trajectory_extraction_zigzag(self): dtype=np.float32, ) - self.assertCML( - trajectory_cube, ("trajectory", "zigzag.cml"), checksum=False - ) + self.assertCML(trajectory_cube, ("trajectory", "zigzag.cml"), checksum=False) self.assertArrayAllClose(trajectory_cube.data, expected, rtol=2.0e-7) def test_colpex__nearest(self): @@ -131,12 +121,8 @@ def test_colpex__nearest(self): # snapshot. test_cube = self.cube[0][0] # Test points on a regular grid, a bit larger than the source region. 
- xmin, xmax = [ - fn(test_cube.coord(axis="x").points) for fn in (np.min, np.max) - ] - ymin, ymax = [ - fn(test_cube.coord(axis="x").points) for fn in (np.min, np.max) - ] + xmin, xmax = [fn(test_cube.coord(axis="x").points) for fn in (np.min, np.max)] + ymin, ymax = [fn(test_cube.coord(axis="x").points) for fn in (np.min, np.max)] fractions = [-0.23, -0.01, 0.27, 0.624, 0.983, 1.052, 1.43] x_points = [xmin + frac * (xmax - xmin) for frac in fractions] y_points = [ymin + frac * (ymax - ymin) for frac in fractions] @@ -204,9 +190,7 @@ def test_colpex__nearest(self): class TestTriPolar(tests.IrisTest): def setUp(self): # load data - cubes = iris.load( - tests.get_data_path(["NetCDF", "ORCA2", "votemper.nc"]) - ) + cubes = iris.load(tests.get_data_path(["NetCDF", "ORCA2", "votemper.nc"])) cube = cubes[0] # The netCDF file has different data types for the points and # bounds of 'depth'. This wasn't previously supported, so we @@ -227,12 +211,8 @@ def setUp(self): def test_tri_polar(self): # extract - sampled_cube = traj_interpolate( - self.cube, self.sample_points, method="nearest" - ) - self.assertCML( - sampled_cube, ("trajectory", "tri_polar_latitude_slice.cml") - ) + sampled_cube = traj_interpolate(self.cube, self.sample_points, method="nearest") + self.assertCML(sampled_cube, ("trajectory", "tri_polar_latitude_slice.cml")) def test_tri_polar_method_linear_fails(self): # Try to request linear interpolation. 
diff --git a/lib/iris/tests/pp.py b/lib/iris/tests/pp.py index 3e07ccbd7f..a0265f9bbf 100644 --- a/lib/iris/tests/pp.py +++ b/lib/iris/tests/pp.py @@ -53,15 +53,11 @@ def cube_save_test( temp_pp_path = iris.util.create_temp_filename(".pp") try: iris.save(reference_cubes, temp_pp_path, **kwargs) - self._create_reference_txt( - reference_txt_path, temp_pp_path - ) + self._create_reference_txt(reference_txt_path, temp_pp_path) finally: os.remove(temp_pp_path) elif reference_pp_path: - self._create_reference_txt( - reference_txt_path, reference_pp_path - ) + self._create_reference_txt(reference_txt_path, reference_pp_path) else: raise ValueError( "Missing all of reference txt file, cubes, and PP path." diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index c66c13bba5..5979d1f0c7 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -19,13 +19,7 @@ from iris.coord_systems import GeogCS, RotatedGeogCS import iris.coords import iris.coords as icoords -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - DimCoord, -) +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, CellMethod, DimCoord from iris.cube import Cube from ._stock_2d_latlons import ( # noqa @@ -143,9 +137,7 @@ def simple_2d(with_bounds=True): bounds=y_bounds if with_bounds else None, ) x_points = np.array([-7.5, 7.5, 22.5, 37.5]) - x_bounds = np.array( - [[-15, 0], [0, 15], [15, 30], [30, 45]], dtype=np.int32 - ) + x_bounds = np.array([[-15, 0], [0, 15], [15, 30], [30, 45]], dtype=np.int32) x_coord = DimCoord( x_points, long_name="foo", @@ -352,22 +344,16 @@ def track_1d(duplicate_x=False): coord = AuxCoord(pts, "projection_x_coordinate", units="1", bounds=bounds) cube.add_aux_coord(coord, [0]) if duplicate_x: - coord = AuxCoord( - pts, "projection_x_coordinate", units="1", bounds=bounds - ) + coord = AuxCoord(pts, "projection_x_coordinate", units="1", bounds=bounds) cube.add_aux_coord(coord, 
[0]) - coord = AuxCoord( - pts * 2, "projection_y_coordinate", units="1", bounds=bounds * 2 - ) + coord = AuxCoord(pts * 2, "projection_y_coordinate", units="1", bounds=bounds * 2) cube.add_aux_coord(coord, 0) return cube def simple_2d_w_multidim_and_scalars(): data = np.arange(50, dtype=np.int32).reshape((5, 10)) - cube = iris.cube.Cube( - data, long_name="test 2d dimensional cube", units="meters" - ) + cube = iris.cube.Cube(data, long_name="test 2d dimensional cube", units="meters") # DimCoords dim1 = DimCoord( @@ -462,9 +448,7 @@ def hybrid_height(): """ data = np.arange(12, dtype="i8").reshape((3, 4)) - orography = AuxCoord( - [10, 25, 50, 5], standard_name="surface_altitude", units="m" - ) + orography = AuxCoord([10, 25, 50, 5], standard_name="surface_altitude", units="m") model_level = AuxCoord([2, 1, 0], standard_name="model_level_number") level_height = DimCoord( [100, 50, 10], @@ -478,9 +462,7 @@ def hybrid_height(): long_name="sigma", bounds=[[0.7, 0.85], [0.85, 0.97], [0.97, 1.0]], ) - hybrid_height = iris.aux_factory.HybridHeightFactory( - level_height, sigma, orography - ) + hybrid_height = iris.aux_factory.HybridHeightFactory(level_height, sigma, orography) cube = iris.cube.Cube( data, @@ -505,28 +487,20 @@ def simple_4d_with_hybrid_height(): 0, ) cube.add_dim_coord( - DimCoord( - np.arange(4, dtype="i8") + 10, "model_level_number", units="1" - ), + DimCoord(np.arange(4, dtype="i8") + 10, "model_level_number", units="1"), 1, ) cube.add_dim_coord( - DimCoord( - np.arange(5, dtype="i8") + 20, "grid_latitude", units="degrees" - ), + DimCoord(np.arange(5, dtype="i8") + 20, "grid_latitude", units="degrees"), 2, ) cube.add_dim_coord( - DimCoord( - np.arange(6, dtype="i8") + 30, "grid_longitude", units="degrees" - ), + DimCoord(np.arange(6, dtype="i8") + 30, "grid_longitude", units="degrees"), 3, ) cube.add_aux_coord( - AuxCoord( - np.arange(4, dtype="i8") + 40, long_name="level_height", units="m" - ), + AuxCoord(np.arange(4, dtype="i8") + 40, 
long_name="level_height", units="m"), 1, ) cube.add_aux_coord( @@ -661,12 +635,8 @@ def realistic_4d(): units="1", attributes={"positive": "up"}, ) - sigma = icoords.AuxCoord( - sigma_pts, long_name="sigma", units="1", bounds=sigma_bnds - ) - orography = icoords.AuxCoord( - orography, standard_name="surface_altitude", units="m" - ) + sigma = icoords.AuxCoord(sigma_pts, long_name="sigma", units="1", bounds=sigma_bnds) + orography = icoords.AuxCoord(orography, standard_name="surface_altitude", units="m") time = icoords.DimCoord( time_pts, standard_name="time", units="hours since 1970-01-01 00:00:00" ) @@ -674,9 +644,7 @@ def realistic_4d(): forecast_period_pts, standard_name="forecast_period", units="hours" ) - hybrid_height = iris.aux_factory.HybridHeightFactory( - level_height, sigma, orography - ) + hybrid_height = iris.aux_factory.HybridHeightFactory(level_height, sigma, orography) cube = iris.cube.Cube( data, diff --git a/lib/iris/tests/stock/_stock_2d_latlons.py b/lib/iris/tests/stock/_stock_2d_latlons.py index 889f8bce12..401a4bafb0 100644 --- a/lib/iris/tests/stock/_stock_2d_latlons.py +++ b/lib/iris/tests/stock/_stock_2d_latlons.py @@ -87,8 +87,7 @@ def grid_coords_2d_from_1d(x_coord_1d, y_coord_1d): for coord in (x_coord_1d, y_coord_1d): if coord.ndim != 1: msg = ( - "Input coords must be one-dimensional. " - 'Coordinate "{}" has shape {}.' + "Input coords must be one-dimensional. " 'Coordinate "{}" has shape {}.' ) raise ValueError(msg.format(coord.name(), coord.shape)) @@ -295,9 +294,7 @@ def sample_cube(xargs, yargs): return cube -def make_bounds_discontiguous_at_point( - cube, at_iy, at_ix, in_y=False, upper=True -): +def make_bounds_discontiguous_at_point(cube, at_iy, at_ix, in_y=False, upper=True): """ Meddle with the XY grid bounds of a 2D cube to make the grid discontiguous. 
@@ -314,8 +311,7 @@ def make_bounds_discontiguous_at_point( y_coord = cube.coord(axis="y") assert x_coord.shape == y_coord.shape assert ( - coord.bounds.ndim == 3 and coord.shape[-1] == 4 - for coord in (x_coord, y_coord) + coord.bounds.ndim == 3 and coord.shape[-1] == 4 for coord in (x_coord, y_coord) ) # For both X and Y coord, move points + bounds to create a discontinuity. diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py index 7726849252..e9ebfc647e 100644 --- a/lib/iris/tests/stock/mesh.py +++ b/lib/iris/tests/stock/mesh.py @@ -80,9 +80,7 @@ def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): edge_nodes = Connectivity(conns, cf_role="edge_node_connectivity") connectivities.append(edge_nodes) - edge_x = AuxCoord( - 2100 + arr.arange(n_edges), standard_name="longitude" - ) + edge_x = AuxCoord(2100 + arr.arange(n_edges), standard_name="longitude") edge_y = AuxCoord(2200 + arr.arange(n_edges), standard_name="latitude") edge_coords_and_axes = [(edge_x, "x"), (edge_y, "y")] @@ -97,9 +95,7 @@ def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): connectivities.append(face_nodes) # Some numbers for the edge coordinates. - face_x = AuxCoord( - 3100 + arr.arange(n_faces), standard_name="longitude" - ) + face_x = AuxCoord(3100 + arr.arange(n_faces), standard_name="longitude") face_y = AuxCoord(3200 + arr.arange(n_faces), standard_name="latitude") face_coords_and_axes = [(face_x, "x"), (face_y, "y")] @@ -127,9 +123,7 @@ def sample_meshcoord(mesh=None, location="face", axis="x", **extra_kwargs): return result -def sample_mesh_cube( - nomesh_faces=None, n_z=2, with_parts=False, **meshcoord_kwargs -): +def sample_mesh_cube(nomesh_faces=None, n_z=2, with_parts=False, **meshcoord_kwargs): """ Create a 2d test cube with 1 'normal' and 1 unstructured dimension (with a Mesh). 
@@ -168,9 +162,7 @@ def sample_mesh_cube( ) n_faces = meshx.shape[0] - mesh_dimco = DimCoord( - np.arange(n_faces), long_name="i_mesh_face", units="1" - ) + mesh_dimco = DimCoord(np.arange(n_faces), long_name="i_mesh_face", units="1") auxco_x = AuxCoord(np.zeros(n_faces), long_name="mesh_face_aux", units="1") diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index 0f6a08b596..4886c5913e 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -19,9 +19,7 @@ NCGEN_PATHSTR = str(env_bin_path("ncgen")) -def ncgen_from_cdl( - cdl_str: Optional[str], cdl_path: Optional[str], nc_path: str -): +def ncgen_from_cdl(cdl_str: Optional[str], cdl_path: Optional[str], nc_path: str): """ Generate a test netcdf file from cdl. @@ -62,23 +60,17 @@ def ncgen_from_cdl( subprocess.run(call_args, check=True, **call_kwargs) -def _file_from_cdl_template( - temp_file_dir, dataset_name, dataset_type, template_subs -): +def _file_from_cdl_template(temp_file_dir, dataset_name, dataset_type, template_subs): """Shared template filling behaviour. Substitutes placeholders in the appropriate CDL template, saves to a NetCDF file. """ - nc_write_path = ( - Path(temp_file_dir).joinpath(dataset_name).with_suffix(".nc") - ) + nc_write_path = Path(temp_file_dir).joinpath(dataset_name).with_suffix(".nc") # Fetch the specified CDL template type. templates_dir = Path(__file__).parent / "file_headers" - template_filepath = templates_dir.joinpath(dataset_type).with_suffix( - ".cdl" - ) + template_filepath = templates_dir.joinpath(dataset_type).with_suffix(".cdl") # Substitute placeholders. 
with open(template_filepath) as file: template_string = Template(file.read()) diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 440b544f94..013065981d 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -45,22 +45,16 @@ def horiz_cs(): 1, ) cm.add_aux_coord( - iris.coords.AuxCoord( - np.array([9], "i8"), "forecast_period", units="hours" - ) + iris.coords.AuxCoord(np.array([9], "i8"), "forecast_period", units="hours") ) hours_since_epoch = cf_units.Unit( "hours since epoch", cf_units.CALENDAR_STANDARD ) cm.add_aux_coord( - iris.coords.AuxCoord( - np.array([3], "i8"), "time", units=hours_since_epoch - ) + iris.coords.AuxCoord(np.array([3], "i8"), "time", units=hours_since_epoch) ) cm.add_aux_coord( - iris.coords.AuxCoord( - np.array([99], "i8"), long_name="pressure", units="Pa" - ) + iris.coords.AuxCoord(np.array([99], "i8"), long_name="pressure", units="Pa") ) filetypes = (".nc", ".pp") @@ -69,9 +63,7 @@ def horiz_cs(): iris.save(cm, saved_tmpfile) new_cube = iris.load_cube(saved_tmpfile) - self.assertCML( - new_cube, ("system", "supported_filetype_%s.cml" % filetype) - ) + self.assertCML(new_cube, ("system", "supported_filetype_%s.cml" % filetype)) def test_imports_general(self): if tests.MPL_AVAILABLE: diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index e34d2ff1bd..60a9018c09 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -39,9 +39,7 @@ def setUp(self): b = np.arange(36, dtype=np.int32).reshape(36, 1, 1) data = b * a - self.cube_single = iris.cube.Cube( - data, long_name="temperature", units="kelvin" - ) + self.cube_single = iris.cube.Cube(data, long_name="temperature", units="kelvin") z_points = np.array( [ @@ -106,9 +104,7 @@ def setUp(self): b = np.arange(20, dtype=np.int32).reshape(20, 1, 1) data = b * a - self.cube_multi = iris.cube.Cube( - data, long_name="temperature", units="kelvin" - ) + self.cube_multi = 
iris.cube.Cube(data, long_name="temperature", units="kelvin") z1_points = np.array( [1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 4, 4, 4, 4, 4, 1, 5, 5, 2, 2], @@ -142,9 +138,7 @@ def setUp(self): # mask_single = np.vstack( ( - np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( - 26, axis=0 - ), + np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat(26, axis=0), np.zeros([10, 3, 3]), ) ) @@ -153,9 +147,7 @@ def setUp(self): ) mask_multi = np.vstack( ( - np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( - 16, axis=0 - ), + np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat(16, axis=0), np.ones([2, 3, 3]), np.zeros([2, 3, 3]), ) @@ -373,29 +365,19 @@ def setUp(self): def test_single(self): # mean group-by with single coordinate name. - aggregateby_cube = self.cube_single.aggregated_by( - "height", iris.analysis.MEAN - ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "single.cml") - ) + aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.MEAN) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) # mean group-by with single coordinate. aggregateby_cube = self.cube_single.aggregated_by( self.coord_z_single, iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "single.cml") - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.single_expected - ) + np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) # rms group-by with single coordinate name. 
- aggregateby_cube = self.cube_single.aggregated_by( - "height", iris.analysis.RMS - ) + aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.RMS) self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "single_rms.cml") ) @@ -408,33 +390,23 @@ def test_single(self): aggregateby_cube, ("analysis", "aggregated_by", "single_rms.cml") ) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.single_rms_expected - ) + np.testing.assert_almost_equal(aggregateby_cube.data, self.single_rms_expected) def test_str_aggregation_single_weights_none(self): # mean group-by with single coordinate name. aggregateby_cube = self.cube_single.aggregated_by( "height", iris.analysis.MEAN, weights=None ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "single.cml") - ) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.single_expected - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) + np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) def test_coord_aggregation_single_weights_none(self): # mean group-by with single coordinate. aggregateby_cube = self.cube_single.aggregated_by( self.coord_z_single, iris.analysis.MEAN, weights=None ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "single.cml") - ) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.single_expected - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) + np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) def test_weighted_single(self): # weighted mean group-by with single coordinate name. 
@@ -466,15 +438,11 @@ def test_weighted_single(self): def test_single_shared(self): z2_points = np.arange(36, dtype=np.int32) - coord_z2 = iris.coords.AuxCoord( - z2_points, long_name="wibble", units="1" - ) + coord_z2 = iris.coords.AuxCoord(z2_points, long_name="wibble", units="1") self.cube_single.add_aux_coord(coord_z2, 0) # group-by with single coordinate name on shared axis. - aggregateby_cube = self.cube_single.aggregated_by( - "height", iris.analysis.MEAN - ) + aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.MEAN) self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "single_shared.cml"), @@ -489,15 +457,11 @@ def test_single_shared(self): ("analysis", "aggregated_by", "single_shared.cml"), ) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.single_expected - ) + np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) def test_weighted_single_shared(self): z2_points = np.arange(36, dtype=np.int32) - coord_z2 = iris.coords.AuxCoord( - z2_points, long_name="wibble", units="1" - ) + coord_z2 = iris.coords.AuxCoord(z2_points, long_name="wibble", units="1") self.cube_single.add_aux_coord(coord_z2, 0) # weighted group-by with single coordinate name on shared axis. @@ -533,9 +497,7 @@ def test_single_shared_circular(self): self.cube_single.add_aux_coord(circ_coord, 0) # group-by with single coordinate name on shared axis. - aggregateby_cube = self.cube_single.aggregated_by( - "height", iris.analysis.MEAN - ) + aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.MEAN) self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "single_shared_circular.cml"), @@ -543,16 +505,12 @@ def test_single_shared_circular(self): # group-by with single coordinate on shared axis. 
coord = self.cube_single.coords("height") - aggregateby_cube = self.cube_single.aggregated_by( - coord, iris.analysis.MEAN - ) + aggregateby_cube = self.cube_single.aggregated_by(coord, iris.analysis.MEAN) self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "single_shared_circular.cml"), ) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.single_expected - ) + np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) def test_weighted_single_shared_circular(self): points = np.arange(36) * 10.0 @@ -601,37 +559,27 @@ def test_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( ["height", "level"], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) # group-by with multiple coordinate names (different order). aggregateby_cube = self.cube_multi.aggregated_by( ["level", "height"], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) # group-by with multiple coordinates. aggregateby_cube = self.cube_multi.aggregated_by( [self.coord_z1_multi, self.coord_z2_multi], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) # group-by with multiple coordinates (different order). 
aggregateby_cube = self.cube_multi.aggregated_by( [self.coord_z2_multi, self.coord_z1_multi], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") - ) + self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.multi_expected - ) + np.testing.assert_almost_equal(aggregateby_cube.data, self.multi_expected) def test_weighted_multi(self): # weighted group-by with multiple coordinate names. @@ -684,13 +632,9 @@ def test_weighted_multi(self): def test_multi_shared(self): z3_points = np.arange(20, dtype=np.int32) - coord_z3 = iris.coords.AuxCoord( - z3_points, long_name="sigma", units="1" - ) + coord_z3 = iris.coords.AuxCoord(z3_points, long_name="sigma", units="1") z4_points = np.arange(19, -1, -1, dtype=np.int32) - coord_z4 = iris.coords.AuxCoord( - z4_points, long_name="gamma", units="1" - ) + coord_z4 = iris.coords.AuxCoord(z4_points, long_name="gamma", units="1") self.cube_multi.add_aux_coord(coord_z3, 0) self.cube_multi.add_aux_coord(coord_z4, 0) @@ -728,19 +672,13 @@ def test_multi_shared(self): aggregateby_cube, ("analysis", "aggregated_by", "multi_shared.cml") ) - np.testing.assert_almost_equal( - aggregateby_cube.data, self.multi_expected - ) + np.testing.assert_almost_equal(aggregateby_cube.data, self.multi_expected) def test_weighted_multi_shared(self): z3_points = np.arange(20, dtype=np.int32) - coord_z3 = iris.coords.AuxCoord( - z3_points, long_name="sigma", units="1" - ) + coord_z3 = iris.coords.AuxCoord(z3_points, long_name="sigma", units="1") z4_points = np.arange(19, -1, -1, dtype=np.int32) - coord_z4 = iris.coords.AuxCoord( - z4_points, long_name="gamma", units="1" - ) + coord_z4 = iris.coords.AuxCoord(z4_points, long_name="gamma", units="1") self.cube_multi.add_aux_coord(coord_z3, 0) self.cube_multi.add_aux_coord(coord_z4, 0) @@ -799,14 +737,10 @@ def test_easy(self): # # Easy mean aggregate test by each coordinate. 
# - aggregateby_cube = self.cube_easy.aggregated_by( - "longitude", iris.analysis.MEAN - ) + aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.MEAN) np.testing.assert_almost_equal( aggregateby_cube.data, - np.array( - [[8.0, 15.0], [10.0, 17.0], [15.0, 8.0]], dtype=np.float32 - ), + np.array([[8.0, 15.0], [10.0, 17.0], [15.0, 8.0]], dtype=np.float32), ) self.assertCML( @@ -814,9 +748,7 @@ def test_easy(self): ("analysis", "aggregated_by", "easy.cml"), ) - aggregateby_cube = self.cube_easy.aggregated_by( - "latitude", iris.analysis.MEAN - ) + aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.MEAN) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -828,19 +760,13 @@ def test_easy(self): # # Easy max aggregate test by each coordinate. # - aggregateby_cube = self.cube_easy.aggregated_by( - "longitude", iris.analysis.MAX - ) + aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.MAX) np.testing.assert_almost_equal( aggregateby_cube.data, - np.array( - [[10.0, 18.0], [12.0, 20.0], [18.0, 10.0]], dtype=np.float32 - ), + np.array([[10.0, 18.0], [12.0, 20.0], [18.0, 10.0]], dtype=np.float32), ) - aggregateby_cube = self.cube_easy.aggregated_by( - "latitude", iris.analysis.MAX - ) + aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.MAX) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -852,19 +778,13 @@ def test_easy(self): # # Easy sum aggregate test by each coordinate. 
# - aggregateby_cube = self.cube_easy.aggregated_by( - "longitude", iris.analysis.SUM - ) + aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.SUM) np.testing.assert_almost_equal( aggregateby_cube.data, - np.array( - [[16.0, 30.0], [20.0, 34.0], [30.0, 16.0]], dtype=np.float32 - ), + np.array([[16.0, 30.0], [20.0, 34.0], [30.0, 16.0]], dtype=np.float32), ) - aggregateby_cube = self.cube_easy.aggregated_by( - "latitude", iris.analysis.SUM - ) + aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.SUM) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -881,9 +801,7 @@ def test_easy(self): ) np.testing.assert_almost_equal( aggregateby_cube.data, - np.array( - [[7.0, 13.5], [9.0, 15.5], [13.5, 7.0]], dtype=np.float32 - ), + np.array([[7.0, 13.5], [9.0, 15.5], [13.5, 7.0]], dtype=np.float32), ) aggregateby_cube = self.cube_easy.aggregated_by( @@ -900,9 +818,7 @@ def test_easy(self): # # Easy root mean square aggregate test by each coordinate. 
# - aggregateby_cube = self.cube_easy.aggregated_by( - "longitude", iris.analysis.RMS - ) + aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.RMS) row = [ list(np.sqrt([68.0, 234.0])), list(np.sqrt([104.0, 298.0])), @@ -912,9 +828,7 @@ def test_easy(self): aggregateby_cube.data, np.array(row, dtype=np.float32) ) - aggregateby_cube = self.cube_easy.aggregated_by( - "latitude", iris.analysis.RMS - ) + aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.RMS) row = [ list(np.sqrt([50.0, 122.0, 170.0, 362.0])), [18.0, 12.0, 10.0, 6.0], @@ -1008,9 +922,7 @@ def test_weighted_easy(self): ) np.testing.assert_almost_equal( aggregateby_cube.data, - np.array( - [[3.0, np.sqrt(65.0)], [np.sqrt(0.4), 4.0]], dtype=np.float32 - ), + np.array([[3.0, np.sqrt(65.0)], [np.sqrt(0.4), 4.0]], dtype=np.float32), ) aggregateby_cube = self.cube_easy_weighted.aggregated_by( @@ -1078,9 +990,7 @@ def test_single_missing(self): aggregateby_cube, ("analysis", "aggregated_by", "single_missing.cml"), ) - self.assertMaskedArrayAlmostEqual( - aggregateby_cube.data, single_expected - ) + self.assertMaskedArrayAlmostEqual(aggregateby_cube.data, single_expected) def test_weighted_single_missing(self): # weighted aggregation correctly handles masked data @@ -1202,9 +1112,7 @@ def test_multi_missing(self): aggregateby_cube, ("analysis", "aggregated_by", "multi_missing.cml"), ) - self.assertMaskedArrayAlmostEqual( - aggregateby_cube.data, multi_expected - ) + self.assertMaskedArrayAlmostEqual(aggregateby_cube.data, multi_expected) def test_weighted_multi_missing(self): # weighted aggregation correctly handles masked data @@ -1362,9 +1270,7 @@ class TestAggregateByWeightedByCube(TestAggregateBy): def setUp(self): super().setUp() - self.weights_single = self.cube_single[:, 0, 0].copy( - self.weights_single - ) + self.weights_single = self.cube_single[:, 0, 0].copy(self.weights_single) self.weights_single.units = "m2" self.weights_multi = self.cube_multi[:, 0, 
0].copy(self.weights_multi) self.weights_multi.units = "m2" diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index f611e25c4e..6ed02b8ad4 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -31,10 +31,7 @@ def assertComparisonDict(self, comparison_dict, reference_filename): coord_groups = comparison_dict[key] string += "%40s " % key names = [ - [ - coord.name() if coord is not None else "None" - for coord in coords - ] + [coord.name() if coord is not None else "None" for coord in coords] for coords in coord_groups ] string += str(sorted(names)) @@ -84,9 +81,7 @@ def test_coord_comparison(self): ), 1, ) - cube2.add_dim_coord( - iris.coords.DimCoord([5, 7, 9, 11, 13], long_name="z"), 2 - ) + cube2.add_dim_coord(iris.coords.DimCoord([5, 7, 9, 11, 13], long_name="z"), 2) cube3 = cube1.copy() lon = cube3.coord("longitude") @@ -262,18 +257,14 @@ def test_weighted_mean(self): f, collapsed_area_weights = e.collapsed( "latitude", iris.analysis.MEAN, weights=area_weights, returned=True ) - g = f.collapsed( - "longitude", iris.analysis.MEAN, weights=collapsed_area_weights - ) + g = f.collapsed("longitude", iris.analysis.MEAN, weights=collapsed_area_weights) # check it's a 0d, scalar cube self.assertEqual(g.shape, ()) # check the value - pp_area_avg's result of 287.927 differs by factor of 1.00002959 np.testing.assert_approx_equal(g.data, 287.935, significant=5) # check we get summed weights even if we don't give any - h, summed_weights = e.collapsed( - "latitude", iris.analysis.MEAN, returned=True - ) + h, summed_weights = e.collapsed("latitude", iris.analysis.MEAN, returned=True) assert summed_weights is not None # Check there was no residual change @@ -352,9 +343,7 @@ def test_std_dev(self): def test_hmean(self): # harmonic mean requires data > 0 self.cube.data *= self.cube.data - self._common( - "hmean", iris.analysis.HMEAN, "original_hmean.cml", rtol=1e-05 - ) + self._common("hmean", iris.analysis.HMEAN, 
"original_hmean.cml", rtol=1e-05) def test_gmean(self): self._common("gmean", iris.analysis.GMEAN, rtol=1e-05) @@ -429,9 +418,7 @@ def setUp(self): self.cube_with_aux_coord.coord("grid_longitude").guess_bounds() def test_max(self): - cube = self.cube_with_aux_coord.collapsed( - "grid_latitude", iris.analysis.MAX - ) + cube = self.cube_with_aux_coord.collapsed("grid_latitude", iris.analysis.MAX) np.testing.assert_array_equal( cube.coord("surface_altitude").points, np.array([112, 113, 114, 115, 116, 117]), @@ -452,9 +439,7 @@ def test_max(self): ) # Check collapsing over the whole coord still works - cube = self.cube_with_aux_coord.collapsed( - "altitude", iris.analysis.MAX - ) + cube = self.cube_with_aux_coord.collapsed("altitude", iris.analysis.MAX) np.testing.assert_array_equal( cube.coord("surface_altitude").points, np.array([114]) @@ -464,9 +449,7 @@ def test_max(self): cube.coord("surface_altitude").bounds, np.array([[100, 129]]) ) - cube = self.cube_with_aux_coord.collapsed( - "grid_longitude", iris.analysis.MAX - ) + cube = self.cube_with_aux_coord.collapsed("grid_longitude", iris.analysis.MAX) np.testing.assert_array_equal( cube.coord("surface_altitude").points, @@ -475,9 +458,7 @@ def test_max(self): np.testing.assert_array_equal( cube.coord("surface_altitude").bounds, - np.array( - [[100, 105], [106, 111], [112, 117], [118, 123], [124, 129]] - ), + np.array([[100, 105], [106, 111], [112, 117], [118, 123], [124, 129]]), ) @@ -500,9 +481,7 @@ def setUp(self): self.cube = cube def test_single_coord_no_mdtol(self): - collapsed = self.cube.collapsed( - self.cube.coord("lat"), iris.analysis.MEAN - ) + collapsed = self.cube.collapsed(self.cube.coord("lat"), iris.analysis.MEAN) t = ma.array([2.5, 5.0], mask=[False, True]) self.assertMaskedArrayEqual(collapsed.data, t) @@ -563,9 +542,7 @@ def _check_collapsed_percentile( if CML_filename is not None: self.assertCML(result, ("analysis", CML_filename), checksum=False) - def _check_percentile( - self, data, axis, 
percents, expected_result, **kwargs - ): + def _check_percentile(self, data, axis, percents, expected_result, **kwargs): result = iris.analysis._percentile(data, axis, percents, **kwargs) np.testing.assert_array_almost_equal(result, expected_result) self.assertEqual(type(result), type(expected_result)) @@ -834,9 +811,7 @@ def test_proportion(self): "foo", iris.analysis.PROPORTION, function=lambda val: val >= 5 ) np.testing.assert_array_almost_equal(gt5.data, np.array([6 / 11.0])) - self.assertCML( - gt5, ("analysis", "proportion_foo_1d.cml"), checksum=False - ) + self.assertCML(gt5, ("analysis", "proportion_foo_1d.cml"), checksum=False) def test_proportion_2d(self): cube = tests.stock.simple_2d() @@ -847,9 +822,7 @@ def test_proportion_2d(self): np.testing.assert_array_almost_equal( gt6.data, np.array([0, 0.5, 1], dtype=np.float32) ) - self.assertCML( - gt6, ("analysis", "proportion_foo_2d.cml"), checksum=False - ) + self.assertCML(gt6, ("analysis", "proportion_foo_2d.cml"), checksum=False) gt6 = cube.collapsed( "bar", iris.analysis.PROPORTION, function=lambda val: val >= 6 @@ -857,9 +830,7 @@ def test_proportion_2d(self): np.testing.assert_array_almost_equal( gt6.data, np.array([1 / 3, 1 / 3, 2 / 3, 2 / 3], dtype=np.float32) ) - self.assertCML( - gt6, ("analysis", "proportion_bar_2d.cml"), checksum=False - ) + self.assertCML(gt6, ("analysis", "proportion_bar_2d.cml"), checksum=False) gt6 = cube.collapsed( ("foo", "bar"), @@ -869,9 +840,7 @@ def test_proportion_2d(self): np.testing.assert_array_almost_equal( gt6.data, np.array([0.5], dtype=np.float32) ) - self.assertCML( - gt6, ("analysis", "proportion_foo_bar_2d.cml"), checksum=False - ) + self.assertCML(gt6, ("analysis", "proportion_foo_bar_2d.cml"), checksum=False) # mask the data cube.data = ma.array(cube.data, mask=cube.data % 2) @@ -895,9 +864,7 @@ def test_proportion_2d(self): def test_count(self): cube = tests.stock.simple_1d() - gt5 = cube.collapsed( - "foo", iris.analysis.COUNT, function=lambda val: val 
>= 5 - ) + gt5 = cube.collapsed("foo", iris.analysis.COUNT, function=lambda val: val >= 5) np.testing.assert_array_almost_equal(gt5.data, np.array([6])) gt5.data = gt5.data.astype("i8") self.assertCML(gt5, ("analysis", "count_foo_1d.cml"), checksum=False) @@ -905,18 +872,14 @@ def test_count(self): def test_count_2d(self): cube = tests.stock.simple_2d() - gt6 = cube.collapsed( - "foo", iris.analysis.COUNT, function=lambda val: val >= 6 - ) + gt6 = cube.collapsed("foo", iris.analysis.COUNT, function=lambda val: val >= 6) np.testing.assert_array_almost_equal( gt6.data, np.array([0, 2, 4], dtype=np.float32) ) gt6.data = gt6.data.astype("i8") self.assertCML(gt6, ("analysis", "count_foo_2d.cml"), checksum=False) - gt6 = cube.collapsed( - "bar", iris.analysis.COUNT, function=lambda val: val >= 6 - ) + gt6 = cube.collapsed("bar", iris.analysis.COUNT, function=lambda val: val >= 6) np.testing.assert_array_almost_equal( gt6.data, np.array([1, 1, 2, 2], dtype=np.float32) ) @@ -926,13 +889,9 @@ def test_count_2d(self): gt6 = cube.collapsed( ("foo", "bar"), iris.analysis.COUNT, function=lambda val: val >= 6 ) - np.testing.assert_array_almost_equal( - gt6.data, np.array([6], dtype=np.float32) - ) + np.testing.assert_array_almost_equal(gt6.data, np.array([6], dtype=np.float32)) gt6.data = gt6.data.astype("i8") - self.assertCML( - gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False - ) + self.assertCML(gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False) def test_max_run_1d(self): cube = tests.stock.simple_1d() @@ -945,9 +904,7 @@ def test_max_run_1d(self): self.assertArrayEqual(result.data, np.array(3)) self.assertEqual(result.units, 1) self.assertTupleEqual(result.cell_methods, ()) - self.assertCML( - result, ("analysis", "max_run_foo_1d.cml"), checksum=False - ) + self.assertCML(result, ("analysis", "max_run_foo_1d.cml"), checksum=False) def test_max_run_lazy(self): cube = tests.stock.simple_1d() @@ -965,9 +922,7 @@ def test_max_run_lazy(self): 
self.assertArrayEqual(result.data, np.array(3)) self.assertEqual(result.units, 1) self.assertTupleEqual(result.cell_methods, ()) - self.assertCML( - result, ("analysis", "max_run_foo_1d.cml"), checksum=False - ) + self.assertCML(result, ("analysis", "max_run_foo_1d.cml"), checksum=False) def test_max_run_2d(self): cube = tests.stock.simple_2d() @@ -979,28 +934,20 @@ def test_max_run_2d(self): iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), ) - self.assertArrayEqual( - foo_result.data, np.array([1, 2, 1], dtype=np.float32) - ) + self.assertArrayEqual(foo_result.data, np.array([1, 2, 1], dtype=np.float32)) self.assertEqual(foo_result.units, 1) self.assertTupleEqual(foo_result.cell_methods, ()) - self.assertCML( - foo_result, ("analysis", "max_run_foo_2d.cml"), checksum=False - ) + self.assertCML(foo_result, ("analysis", "max_run_foo_2d.cml"), checksum=False) bar_result = cube.collapsed( "bar", iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), ) - self.assertArrayEqual( - bar_result.data, np.array([2, 2, 0, 3], dtype=np.float32) - ) + self.assertArrayEqual(bar_result.data, np.array([2, 2, 0, 3], dtype=np.float32)) self.assertEqual(bar_result.units, 1) self.assertTupleEqual(bar_result.cell_methods, ()) - self.assertCML( - bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False - ) + self.assertCML(bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False) with self.assertRaises(ValueError): _ = cube.collapsed( @@ -1025,9 +972,7 @@ def test_max_run_masked(self): iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 9, 10, 11]), ) - self.assertArrayEqual( - result.data, np.array([1, 1, 2, 0], dtype=np.float32) - ) + self.assertArrayEqual(result.data, np.array([1, 1, 2, 0], dtype=np.float32)) self.assertEqual(result.units, 1) self.assertTupleEqual(result.cell_methods, ()) self.assertCML( @@ -1039,22 +984,16 @@ def test_weighted_sum_consistency(self): cube = 
tests.stock.simple_1d() normal_sum = cube.collapsed("foo", iris.analysis.SUM) weights = np.ones_like(cube.data) - weighted_sum = cube.collapsed( - "foo", iris.analysis.SUM, weights=weights - ) + weighted_sum = cube.collapsed("foo", iris.analysis.SUM, weights=weights) self.assertArrayAlmostEqual(normal_sum.data, weighted_sum.data) def test_weighted_sum_1d(self): # verify 1d weighted sum is correct cube = tests.stock.simple_1d() - weights = np.array( - [0.05, 0.05, 0.1, 0.1, 0.2, 0.3, 0.2, 0.1, 0.1, 0.05, 0.05] - ) + weights = np.array([0.05, 0.05, 0.1, 0.1, 0.2, 0.3, 0.2, 0.1, 0.1, 0.05, 0.05]) result = cube.collapsed("foo", iris.analysis.SUM, weights=weights) self.assertAlmostEqual(result.data, 6.5) - self.assertCML( - result, ("analysis", "sum_weighted_1d.cml"), checksum=False - ) + self.assertCML(result, ("analysis", "sum_weighted_1d.cml"), checksum=False) def test_weighted_sum_2d(self): # verify 2d weighted sum is correct @@ -1062,12 +1001,8 @@ def test_weighted_sum_2d(self): weights = np.array([0.3, 0.4, 0.3]) weights = iris.util.broadcast_to_shape(weights, cube.shape, [0]) result = cube.collapsed("bar", iris.analysis.SUM, weights=weights) - self.assertArrayAlmostEqual( - result.data, np.array([4.0, 5.0, 6.0, 7.0]) - ) - self.assertCML( - result, ("analysis", "sum_weighted_2d.cml"), checksum=False - ) + self.assertArrayAlmostEqual(result.data, np.array([4.0, 5.0, 6.0, 7.0])) + self.assertCML(result, ("analysis", "sum_weighted_2d.cml"), checksum=False) def test_weighted_rms(self): cube = tests.stock.simple_2d() @@ -1083,9 +1018,7 @@ def test_weighted_rms(self): expected_result = np.array([8.0, 24.0, 16.0]) result = cube.collapsed("foo", iris.analysis.RMS, weights=weights) self.assertArrayAlmostEqual(result.data, expected_result) - self.assertCML( - result, ("analysis", "rms_weighted_2d.cml"), checksum=False - ) + self.assertCML(result, ("analysis", "rms_weighted_2d.cml"), checksum=False) @tests.skip_data @@ -1128,17 +1061,11 @@ def test_unrotate_nd(self): rlons 
= np.array([[350.0, 352.0], [350.0, 352.0]]) rlats = np.array([[-5.0, -0.0], [-4.0, -1.0]]) - resx, resy = iris.analysis.cartography.unrotate_pole( - rlons, rlats, 178.0, 38.0 - ) + resx, resy = iris.analysis.cartography.unrotate_pole(rlons, rlats, 178.0, 38.0) # Solutions derived by proj4 direct. - solx = np.array( - [[-16.42176094, -14.85892262], [-16.71055023, -14.58434624]] - ) - soly = np.array( - [[46.00724251, 51.29188893], [46.98728486, 50.30706042]] - ) + solx = np.array([[-16.42176094, -14.85892262], [-16.71055023, -14.58434624]]) + soly = np.array([[46.00724251, 51.29188893], [46.98728486, 50.30706042]]) self.assertArrayAlmostEqual(resx, solx) self.assertArrayAlmostEqual(resy, soly) @@ -1152,9 +1079,7 @@ def test_unrotate_1d(self): ) # Solutions derived by proj4 direct. - solx = np.array( - [-16.42176094, -14.85892262, -12.88946157, -10.35078336] - ) + solx = np.array([-16.42176094, -14.85892262, -12.88946157, -10.35078336]) soly = np.array([46.00724251, 51.29188893, 56.55031485, 61.77015703]) self.assertArrayAlmostEqual(resx, solx) @@ -1164,17 +1089,11 @@ def test_rotate_nd(self): rlons = np.array([[350.0, 351.0], [352.0, 353.0]]) rlats = np.array([[10.0, 15.0], [20.0, 25.0]]) - resx, resy = iris.analysis.cartography.rotate_pole( - rlons, rlats, 20.0, 80.0 - ) + resx, resy = iris.analysis.cartography.rotate_pole(rlons, rlats, 20.0, 80.0) # Solutions derived by proj4 direct. - solx = np.array( - [[148.69672569, 149.24727087], [149.79067025, 150.31754368]] - ) - soly = np.array( - [[18.60905789, 23.67749384], [28.74419024, 33.8087963]] - ) + solx = np.array([[148.69672569, 149.24727087], [149.79067025, 150.31754368]]) + soly = np.array([[18.60905789, 23.67749384], [28.74419024, 33.8087963]]) self.assertArrayAlmostEqual(resx, solx) self.assertArrayAlmostEqual(resy, soly) @@ -1188,9 +1107,7 @@ def test_rotate_1d(self): ) # Solutions derived by proj4 direct. 
- solx = np.array( - [148.69672569, 149.24727087, 149.79067025, 150.31754368] - ) + solx = np.array([148.69672569, 149.24727087, 149.79067025, 150.31754368]) soly = np.array([18.60905789, 23.67749384, 28.74419024, 33.8087963]) self.assertArrayAlmostEqual(resx, solx) @@ -1300,9 +1217,7 @@ def test_area_weights_singletons(self): def test_area_weights_normalized(self): # normalized area weights must sum to one over lat/lon dimensions. - weights = iris.analysis.cartography.area_weights( - self.cube, normalize=True - ) + weights = iris.analysis.cartography.area_weights(self.cube, normalize=True) sumweights = weights.sum(axis=3).sum(axis=2) # sum over lon and lat self.assertArrayAlmostEqual(sumweights, 1) @@ -1368,52 +1283,36 @@ def test_cosine_latitude_weights_0d(self): self.cube_dim_lat[:, 0, :] ) self.assertEqual(weights.shape, self.cube_dim_lat[:, 0, :].shape) - self.assertAlmostEqual( - weights[0, 0], np.cos(np.deg2rad(self.lat1d[0])) - ) + self.assertAlmostEqual(weights[0, 0], np.cos(np.deg2rad(self.lat1d[0]))) def test_cosine_latitude_weights_1d_singleton(self): # singleton (1-point) 1d latitude coordinate (time, lat, lon) cube = self.cube_dim_lat[:, 0:1, :] weights = iris.analysis.cartography.cosine_latitude_weights(cube) self.assertEqual(weights.shape, cube.shape) - self.assertAlmostEqual( - weights[0, 0, 0], np.cos(np.deg2rad(self.lat1d[0])) - ) + self.assertAlmostEqual(weights[0, 0, 0], np.cos(np.deg2rad(self.lat1d[0]))) def test_cosine_latitude_weights_1d(self): # 1d latitude coordinate (time, lat, lon) - weights = iris.analysis.cartography.cosine_latitude_weights( - self.cube_dim_lat - ) + weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) self.assertEqual(weights.shape, self.cube.shape) - self.assertArrayAlmostEqual( - weights[0, :, 0], np.cos(np.deg2rad(self.lat1d)) - ) + self.assertArrayAlmostEqual(weights[0, :, 0], np.cos(np.deg2rad(self.lat1d))) def test_cosine_latitude_weights_1d_latitude_first(self): # 1d latitude 
coordinate with latitude first (lat, time, lon) order = [1, 0, 2] # (lat, time, lon) self.cube_dim_lat.transpose(order) - weights = iris.analysis.cartography.cosine_latitude_weights( - self.cube_dim_lat - ) + weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) self.assertEqual(weights.shape, self.cube_dim_lat.shape) - self.assertArrayAlmostEqual( - weights[:, 0, 0], np.cos(np.deg2rad(self.lat1d)) - ) + self.assertArrayAlmostEqual(weights[:, 0, 0], np.cos(np.deg2rad(self.lat1d))) def test_cosine_latitude_weights_1d_latitude_last(self): # 1d latitude coordinate with latitude last (time, lon, lat) order = [0, 2, 1] # (time, lon, lat) self.cube_dim_lat.transpose(order) - weights = iris.analysis.cartography.cosine_latitude_weights( - self.cube_dim_lat - ) + weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) self.assertEqual(weights.shape, self.cube_dim_lat.shape) - self.assertArrayAlmostEqual( - weights[0, 0, :], np.cos(np.deg2rad(self.lat1d)) - ) + self.assertArrayAlmostEqual(weights[0, 0, :], np.cos(np.deg2rad(self.lat1d))) def test_cosine_latitude_weights_2d_singleton1(self): # 2d latitude coordinate with first dimension singleton @@ -1444,45 +1343,31 @@ def test_cosine_latitude_weights_2d_singleton3(self): def test_cosine_latitude_weights_2d(self): # 2d latitude coordinate (time, lat, lon) - weights = iris.analysis.cartography.cosine_latitude_weights( - self.cube_aux_lat - ) + weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_aux_lat) self.assertEqual(weights.shape, self.cube_aux_lat.shape) - self.assertArrayAlmostEqual( - weights[0, :, :], np.cos(np.deg2rad(self.lat2d)) - ) + self.assertArrayAlmostEqual(weights[0, :, :], np.cos(np.deg2rad(self.lat2d))) def test_cosine_latitude_weights_2d_latitude_first(self): # 2d latitude coordinate with latitude first (lat, time, lon) order = [1, 0, 2] # (lat, time, lon) self.cube_aux_lat.transpose(order) - weights = 
iris.analysis.cartography.cosine_latitude_weights( - self.cube_aux_lat - ) + weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_aux_lat) self.assertEqual(weights.shape, self.cube_aux_lat.shape) - self.assertArrayAlmostEqual( - weights[:, 0, :], np.cos(np.deg2rad(self.lat2d)) - ) + self.assertArrayAlmostEqual(weights[:, 0, :], np.cos(np.deg2rad(self.lat2d))) def test_cosine_latitude_weights_2d_latitude_last(self): # 2d latitude coordinate with latitude last (time, lon, lat) order = [0, 2, 1] # (time, lon, lat) self.cube_aux_lat.transpose(order) - weights = iris.analysis.cartography.cosine_latitude_weights( - self.cube_aux_lat - ) + weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_aux_lat) self.assertEqual(weights.shape, self.cube_aux_lat.shape) - self.assertArrayAlmostEqual( - weights[0, :, :], np.cos(np.deg2rad(self.lat2d.T)) - ) + self.assertArrayAlmostEqual(weights[0, :, :], np.cos(np.deg2rad(self.lat2d.T))) def test_cosine_latitude_weights_no_latitude(self): # no coordinate identified as latitude self.cube_dim_lat.remove_coord("grid_latitude") with self.assertRaises(ValueError): - _ = iris.analysis.cartography.cosine_latitude_weights( - self.cube_dim_lat - ) + _ = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) def test_cosine_latitude_weights_multiple_latitude(self): # two coordinates identified as latitude @@ -1518,18 +1403,14 @@ def setUp(self): self.cube = cube def test_non_mean_operator(self): - res_cube = self.cube.rolling_window( - "longitude", iris.analysis.MAX, window=2 - ) + res_cube = self.cube.rolling_window("longitude", iris.analysis.MAX, window=2) expected_result = np.array( [[10, 12, 18], [12, 14, 20], [18, 12, 10]], dtype=np.float64 ) self.assertArrayEqual(expected_result, res_cube.data) def test_longitude_simple(self): - res_cube = self.cube.rolling_window( - "longitude", iris.analysis.MEAN, window=2 - ) + res_cube = self.cube.rolling_window("longitude", iris.analysis.MEAN, window=2) 
expected_result = np.array( [[8.0, 11.0, 15.0], [10.0, 13.0, 17.0], [15.0, 11.0, 8.0]], @@ -1538,9 +1419,7 @@ def test_longitude_simple(self): self.assertArrayEqual(expected_result, res_cube.data) - self.assertCML( - res_cube, ("analysis", "rolling_window", "simple_longitude.cml") - ) + self.assertCML(res_cube, ("analysis", "rolling_window", "simple_longitude.cml")) self.assertRaises( ValueError, @@ -1559,9 +1438,7 @@ def test_longitude_masked(self): [False, False, False, False], ], ) - res_cube = self.cube.rolling_window( - "longitude", iris.analysis.MEAN, window=2 - ) + res_cube = self.cube.rolling_window("longitude", iris.analysis.MEAN, window=2) expected_result = np.ma.array( [[-99.0, -99.0, -99.0], [12.0, 12.0, -99.0], [15.0, 11.0, 8.0]], @@ -1587,17 +1464,13 @@ def test_longitude_circular(self): ) def test_different_length_windows(self): - res_cube = self.cube.rolling_window( - "longitude", iris.analysis.MEAN, window=4 - ) + res_cube = self.cube.rolling_window("longitude", iris.analysis.MEAN, window=4) expected_result = np.array([[11.5], [13.5], [11.5]], dtype=np.float64) self.assertArrayEqual(expected_result, res_cube.data) - self.assertCML( - res_cube, ("analysis", "rolling_window", "size_4_longitude.cml") - ) + self.assertCML(res_cube, ("analysis", "rolling_window", "size_4_longitude.cml")) # Window too long: self.assertRaises( @@ -1626,9 +1499,7 @@ def test_bad_coordinate(self): ) def test_latitude_simple(self): - res_cube = self.cube.rolling_window( - "latitude", iris.analysis.MEAN, window=2 - ) + res_cube = self.cube.rolling_window("latitude", iris.analysis.MEAN, window=2) expected_result = np.array( [[7.0, 11.0, 13.0, 19.0], [13.0, 12.0, 12.0, 13.0]], @@ -1637,9 +1508,7 @@ def test_latitude_simple(self): self.assertArrayEqual(expected_result, res_cube.data) - self.assertCML( - res_cube, ("analysis", "rolling_window", "simple_latitude.cml") - ) + self.assertCML(res_cube, ("analysis", "rolling_window", "simple_latitude.cml")) def 
test_mean_with_weights_consistency(self): # equal weights should be the same as the mean with no weights diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 36e008f38e..513a939579 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -42,9 +42,7 @@ def test_delta_coord_lookup(self): coord_system=iris.coord_systems.OSGB(), ) cube.add_dim_coord(coord, 0) - delta = iris.analysis.calculus.cube_delta( - cube, "projection_x_coordinate" - ) + delta = iris.analysis.calculus.cube_delta(cube, "projection_x_coordinate") delta_coord = delta.coord("projection_x_coordinate") self.assertEqual(delta_coord, delta.coord(coord)) self.assertEqual(coord, cube.coord(delta_coord)) @@ -245,16 +243,12 @@ def test_cos(self): ) # Now that we have tested the points & bounds, remove them and just test the xml - cos_of_coord = cos_of_coord.copy( - points=np.array([1], dtype=np.float32) - ) + cos_of_coord = cos_of_coord.copy(points=np.array([1], dtype=np.float32)) cos_of_coord_radians = cos_of_coord_radians.copy( points=np.array([1], dtype=np.float32) ) - self.assertXMLElement( - cos_of_coord, ("analysis", "calculus", "cos_simple.xml") - ) + self.assertXMLElement(cos_of_coord, ("analysis", "calculus", "cos_simple.xml")) self.assertXMLElement( cos_of_coord_radians, ("analysis", "calculus", "cos_simple_radians.xml"), @@ -291,15 +285,11 @@ def setUp(self): def test_diff_wrt_lon(self): t = iris.analysis.calculus.differentiate(self.cube, "longitude") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade2_wrt_lon.cml") - ) + self.assertCMLApproxData(t, ("analysis", "calculus", "handmade2_wrt_lon.cml")) def test_diff_wrt_lat(self): t = iris.analysis.calculus.differentiate(self.cube, "latitude") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade2_wrt_lat.cml") - ) + self.assertCMLApproxData(t, ("analysis", "calculus", "handmade2_wrt_lat.cml")) class 
TestCalculusSimple2(tests.IrisTest): @@ -346,9 +336,7 @@ def setUp(self): 0, ) cube.add_aux_coord( - DimCoord( - np.arange(5, dtype=np.float32), long_name="y", units="count" - ), + DimCoord(np.arange(5, dtype=np.float32), long_name="y", units="count"), 1, ) @@ -356,27 +344,19 @@ def setUp(self): def test_diff_wrt_x(self): t = iris.analysis.calculus.differentiate(self.cube, "x") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade_wrt_x.cml") - ) + self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_x.cml")) def test_diff_wrt_y(self): t = iris.analysis.calculus.differentiate(self.cube, "y") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade_wrt_y.cml") - ) + self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_y.cml")) def test_diff_wrt_lon(self): t = iris.analysis.calculus.differentiate(self.cube, "longitude") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade_wrt_lon.cml") - ) + self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_lon.cml")) def test_diff_wrt_lat(self): t = iris.analysis.calculus.differentiate(self.cube, "latitude") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade_wrt_lat.cml") - ) + self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_lat.cml")) def test_delta_wrt_x(self): t = iris.analysis.calculus.cube_delta(self.cube, "x") @@ -418,15 +398,11 @@ def setUp(self): cube = iris.cube.Cube(data, standard_name="x_wind", units="km/h") cube.add_dim_coord( - DimCoord( - np.arange(5, dtype=np.float32), long_name="x", units="count" - ), + DimCoord(np.arange(5, dtype=np.float32), long_name="x", units="count"), 0, ) cube.add_dim_coord( - DimCoord( - np.arange(5, dtype=np.float32), long_name="y", units="count" - ), + DimCoord(np.arange(5, dtype=np.float32), long_name="y", units="count"), 1, ) @@ -595,9 +571,7 @@ def test_contrived_differential1(self): data = -sin_x_pts * y_ones result = df_dlon.copy(data=data) - 
np.testing.assert_array_almost_equal( - result.data, df_dlon.data, decimal=3 - ) + np.testing.assert_array_almost_equal(result.data, df_dlon.data, decimal=3) def test_contrived_differential2(self): # testing : diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index c0329b72d6..f234239dcc 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -99,9 +99,7 @@ def test_minus_array(self): self.assertArrayEqual(b.data[:, 1:2], b.data[:, 1:2]) # subtract an array of 1 dimension more than the cube - d_array = data_array.reshape( - data_array.shape[0], data_array.shape[1], 1 - ) + d_array = data_array.reshape(data_array.shape[0], data_array.shape[1], 1) self.assertRaises(ValueError, iris.analysis.maths.subtract, a, d_array) # Check that the subtraction has had no effect on the original @@ -204,9 +202,7 @@ def test_addition_fail(self): points=np.arange(a.shape[xdim]), long_name="x_coord", units="volts" ) - self.assertRaises( - ValueError, iris.analysis.maths.add, a, c_axis_length_fail - ) + self.assertRaises(ValueError, iris.analysis.maths.add, a, c_axis_length_fail) self.assertRaises( NotYetImplementedError, iris.analysis.maths.add, @@ -277,19 +273,13 @@ def test_apply_ufunc_fail(self): a = self.cube # should fail because 'blah' is a string, not a np.ufunc - self.assertRaises( - TypeError, iris.analysis.maths.apply_ufunc, "blah", a - ) + self.assertRaises(TypeError, iris.analysis.maths.apply_ufunc, "blah", a) # should fail because math.sqrt is not a np.ufunc - self.assertRaises( - TypeError, iris.analysis.maths.apply_ufunc, math.sqrt, a - ) + self.assertRaises(TypeError, iris.analysis.maths.apply_ufunc, math.sqrt, a) # should fail because np.frexp gives 2 arrays as output - self.assertRaises( - ValueError, iris.analysis.maths.apply_ufunc, np.frexp, a - ) + self.assertRaises(ValueError, iris.analysis.maths.apply_ufunc, np.frexp, a) def test_ifunc(self): a = self.cube @@ -311,9 +301,7 @@ def vec_mag(u, 
v): c = a.copy() + 2 vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) - my_ifunc = iris.analysis.maths.IFunc( - vec_mag_ufunc, lambda a, b: (a + b).units - ) + my_ifunc = iris.analysis.maths.IFunc(vec_mag_ufunc, lambda a, b: (a + b).units) b = my_ifunc(a, c) self.assertCMLApproxData(b, ("analysis", "apply_ifunc_frompyfunc.cml")) @@ -356,9 +344,7 @@ def test_ifunc_call_fail(self): with self.assertRaises(ValueError): my_ifunc(a, a) - my_ifunc = iris.analysis.maths.IFunc( - np.multiply, lambda a: cf_units.Unit("1") - ) + my_ifunc = iris.analysis.maths.IFunc(np.multiply, lambda a: cf_units.Unit("1")) # should fail because giving 1 arguments to an ifunc that expects # 2 @@ -431,9 +417,7 @@ def test_divide_by_array(self): # test division by exactly the same shape data c = a / data_array self.assertArrayEqual(c.data, np.array(1, dtype=np.float32)) - self.assertCML( - c, ("analysis", "division_by_array.cml"), checksum=False - ) + self.assertCML(c, ("analysis", "division_by_array.cml"), checksum=False) # test division by array of fewer dimensions c = a / data_array[0, :] @@ -631,9 +615,7 @@ def vec_mag(u, v): c = a.copy() + 2 vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) - my_ifunc = iris.analysis.maths.IFunc( - vec_mag_ufunc, lambda x, y: (x + y).units - ) + my_ifunc = iris.analysis.maths.IFunc(vec_mag_ufunc, lambda x, y: (x + y).units) b = my_ifunc(a, c) answer = (a.data**2 + c.data**2) ** 0.5 @@ -834,27 +816,19 @@ def test_operator_array(self): self.assertArrayAlmostEqual(result3.data, result4) def test_cube_itruediv__int(self): - with self.assertRaisesRegex( - ArithmeticError, "Cannot perform inplace division" - ): + with self.assertRaisesRegex(ArithmeticError, "Cannot perform inplace division"): operator.itruediv(self.cube_1i, self.cube_2i) def test_cube_itruediv__uint(self): - with self.assertRaisesRegex( - ArithmeticError, "Cannot perform inplace division" - ): + with self.assertRaisesRegex(ArithmeticError, "Cannot perform inplace division"): 
operator.itruediv(self.cube_1u, self.cube_2u) def test_int_cube_itruediv__scalar(self): - with self.assertRaisesRegex( - ArithmeticError, "Cannot perform inplace division" - ): + with self.assertRaisesRegex(ArithmeticError, "Cannot perform inplace division"): operator.itruediv(self.cube_1i, 5) def test_uint_cube_itruediv__scalar(self): - with self.assertRaisesRegex( - ArithmeticError, "Cannot perform inplace division" - ): + with self.assertRaisesRegex(ArithmeticError, "Cannot perform inplace division"): operator.itruediv(self.cube_1u, 5) diff --git a/lib/iris/tests/test_cartography.py b/lib/iris/tests/test_cartography.py index 58dccb78aa..0e70c2e069 100644 --- a/lib/iris/tests/test_cartography.py +++ b/lib/iris/tests/test_cartography.py @@ -33,9 +33,7 @@ def test_2d(self): (0, 1), ) cube.add_aux_coord( - iris.coords.AuxCoord( - np.arange(100, 112).reshape(3, 4), "longitude" - ), + iris.coords.AuxCoord(np.arange(100, 112).reshape(3, 4), "longitude"), (0, 1), ) x, y = iris.analysis.cartography.get_xy_grids(cube) @@ -48,14 +46,10 @@ def test_3d(self): (0, 1, 2), ) cube.add_aux_coord( - iris.coords.AuxCoord( - np.arange(100, 160).reshape(5, 3, 4), "longitude" - ), + iris.coords.AuxCoord(np.arange(100, 160).reshape(5, 3, 4), "longitude"), (0, 1, 2), ) - self.assertRaises( - ValueError, iris.analysis.cartography.get_xy_grids, cube - ) + self.assertRaises(ValueError, iris.analysis.cartography.get_xy_grids, cube) if __name__ == "__main__": diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index c748b9dfd4..a419cd6c45 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -52,12 +52,8 @@ def setUp(self): np.arange(12, dtype=np.int32).reshape((3, 4)), long_name="test cube", ) - self.x = iris.coords.DimCoord( - np.array([-7.5, 7.5, 22.5, 37.5]), long_name="x" - ) - self.y = iris.coords.DimCoord( - np.array([2.5, 7.5, 12.5]), long_name="y" - ) + self.x = iris.coords.DimCoord(np.array([-7.5, 7.5, 22.5, 37.5]), long_name="x") + self.y = 
iris.coords.DimCoord(np.array([2.5, 7.5, 12.5]), long_name="y") self.xy = iris.coords.AuxCoord( np.arange(12).reshape((3, 4)) * 3.0, long_name="xy" ) @@ -90,19 +86,13 @@ def test_add_dim_coord(self): self.cube.add_aux_coord(self.y, 0) # Can't add AuxCoord to dim_coords - y_other = iris.coords.AuxCoord( - np.array([2.5, 7.5, 12.5]), long_name="y_other" - ) + y_other = iris.coords.AuxCoord(np.array([2.5, 7.5, 12.5]), long_name="y_other") with self.assertRaises(ValueError): self.cube.add_dim_coord(y_other, 0) def test_add_scalar_coord(self): - scalar_dim_coord = iris.coords.DimCoord( - 23, long_name="scalar_dim_coord" - ) - scalar_aux_coord = iris.coords.AuxCoord( - 23, long_name="scalar_aux_coord" - ) + scalar_dim_coord = iris.coords.DimCoord(23, long_name="scalar_dim_coord") + scalar_aux_coord = iris.coords.AuxCoord(23, long_name="scalar_aux_coord") # Scalars cannot be in cube.dim_coords with self.assertRaises(TypeError): self.cube.add_dim_coord(scalar_dim_coord) @@ -128,9 +118,7 @@ def test_add_scalar_coord(self): cube = self.cube.copy() cube.add_aux_coord(scalar_dim_coord) cube.add_aux_coord(scalar_aux_coord) - self.assertEqual( - set(cube.aux_coords), {scalar_dim_coord, scalar_aux_coord} - ) + self.assertEqual(set(cube.aux_coords), {scalar_dim_coord, scalar_aux_coord}) # Various options for dims cube = self.cube.copy() @@ -296,21 +284,15 @@ def test_missing_coords(self): self.assertString( repr(cube), ("cdm", "str_repr", "missing_coords_cube.repr.txt") ) - self.assertString( - str(cube), ("cdm", "str_repr", "missing_coords_cube.str.txt") - ) + self.assertString(str(cube), ("cdm", "str_repr", "missing_coords_cube.str.txt")) @tests.skip_data def test_cubelist_string(self): cube_list = iris.cube.CubeList( [iris.tests.stock.realistic_4d(), iris.tests.stock.global_pp()] ) - self.assertString( - str(cube_list), ("cdm", "str_repr", "cubelist.__str__.txt") - ) - self.assertString( - repr(cube_list), ("cdm", "str_repr", "cubelist.__repr__.txt") - ) + 
self.assertString(str(cube_list), ("cdm", "str_repr", "cubelist.__str__.txt")) + self.assertString(repr(cube_list), ("cdm", "str_repr", "cubelist.__repr__.txt")) def test_basic_0d_cube(self): self.assertString( @@ -343,9 +325,7 @@ def test_similar_coord(self): lat2.attributes["test"] = "True" cube.add_aux_coord(lat2, [0]) - self.assertString( - str(cube), ("cdm", "str_repr", "similar.__str__.txt") - ) + self.assertString(str(cube), ("cdm", "str_repr", "similar.__str__.txt")) def test_cube_summary_cell_methods(self): cube = self.cube_2d.copy() @@ -380,9 +360,7 @@ def test_cube_summary_cell_methods(self): ) cube.add_cell_method(cm) - self.assertString( - str(cube), ("cdm", "str_repr", "cell_methods.__str__.txt") - ) + self.assertString(str(cube), ("cdm", "str_repr", "cell_methods.__str__.txt")) def test_cube_summary_alignment(self): # Test the cube summary dimension alignment and coord name clipping @@ -393,9 +371,7 @@ def test_cube_summary_alignment(self): "long_name that must be clipped because it is too long", ) cube.add_aux_coord(aux, 0) - aux = iris.coords.AuxCoord( - np.arange(11), long_name="This is a short long_name" - ) + aux = iris.coords.AuxCoord(np.arange(11), long_name="This is a short long_name") cube.add_aux_coord(aux, 0) self.assertString(str(cube), ("cdm", "str_repr", "simple.__str__.txt")) @@ -414,16 +390,12 @@ def setUp(self): ) def test_wrong_length_vector_coord(self): - wobble = iris.coords.DimCoord( - points=[1, 2], long_name="wobble", units="1" - ) + wobble = iris.coords.DimCoord(points=[1, 2], long_name="wobble", units="1") with self.assertRaises(ValueError): self.cube_2d.add_aux_coord(wobble, 0) def test_invalid_dimension_vector_coord(self): - wobble = iris.coords.DimCoord( - points=[1, 2], long_name="wobble", units="1" - ) + wobble = iris.coords.DimCoord(points=[1, 2], long_name="wobble", units="1") with self.assertRaises(ValueError): self.cube_2d.add_dim_coord(wobble, 99) @@ -443,9 +415,7 @@ def test_name(self): 
self.assertEqual([coord.name() for coord in coords], ["an_other"]) coords = self.t.coords("air_temperature") - self.assertEqual( - [coord.name() for coord in coords], ["air_temperature"] - ) + self.assertEqual([coord.name() for coord in coords], ["air_temperature"]) coords = self.t.coords("wibble") self.assertEqual(coords, []) @@ -454,25 +424,19 @@ def test_long_name(self): # Both standard_name and long_name defined coords = self.t.coords(long_name="custom long name") # coord.name() returns standard_name if available - self.assertEqual( - [coord.name() for coord in coords], ["air_temperature"] - ) + self.assertEqual([coord.name() for coord in coords], ["air_temperature"]) def test_standard_name(self): # Both standard_name and long_name defined coords = self.t.coords(standard_name="custom long name") self.assertEqual([coord.name() for coord in coords], []) coords = self.t.coords(standard_name="air_temperature") - self.assertEqual( - [coord.name() for coord in coords], ["air_temperature"] - ) + self.assertEqual([coord.name() for coord in coords], ["air_temperature"]) def test_var_name(self): coords = self.t.coords(var_name="custom_var_name") # Matching coord in test cube has a standard_name of 'air_temperature'. 
- self.assertEqual( - [coord.name() for coord in coords], ["air_temperature"] - ) + self.assertEqual([coord.name() for coord in coords], ["air_temperature"]) def test_axis(self): cube = self.t.copy() @@ -529,15 +493,11 @@ def test_dimensions(self): self.assertEqual(coords, []) coords = self.t.coords(dimensions=[0, 1]) - self.assertEqual( - [coord.name() for coord in coords], ["my_multi_dim_coord"] - ) + self.assertEqual([coord.name() for coord in coords], ["my_multi_dim_coord"]) def test_coord_dim_coords_keyword(self): coords = self.t.coords(dim_coords=True) - self.assertEqual( - set([coord.name() for coord in coords]), {"dim1", "dim2"} - ) + self.assertEqual(set([coord.name() for coord in coords]), {"dim1", "dim2"}) coords = self.t.coords(dim_coords=False) self.assertEqual( @@ -589,14 +549,10 @@ def test_indexing_of_0d_cube(self): self.assertRaises(IndexError, c.__getitem__, (slice(None, None),)) def test_cube_indexing_0d(self): - self.assertCML( - [self.t[0, 0]], ("cube_slice", "2d_to_0d_cube_slice.cml") - ) + self.assertCML([self.t[0, 0]], ("cube_slice", "2d_to_0d_cube_slice.cml")) def test_cube_indexing_1d(self): - self.assertCML( - [self.t[0, 0:]], ("cube_slice", "2d_to_1d_cube_slice.cml") - ) + self.assertCML([self.t[0, 0:]], ("cube_slice", "2d_to_1d_cube_slice.cml")) def test_cube_indexing_1d_multi_slice(self): self.assertCML( @@ -636,9 +592,7 @@ def test_cube_indexing_no_change(self): self.assertCML([self.t[0:, 0:]], ("cube_slice", "2d_orig.cml")) def test_cube_indexing_reverse_coords(self): - self.assertCML( - [self.t[::-1, ::-1]], ("cube_slice", "2d_to_2d_revesed.cml") - ) + self.assertCML([self.t[::-1, ::-1]], ("cube_slice", "2d_to_2d_revesed.cml")) def test_cube_indexing_no_residual_change(self): self.t[0:3] @@ -651,9 +605,7 @@ def test_overspecified(self): def test_ellipsis(self): self.assertCML([self.t[Ellipsis]], ("cube_slice", "2d_orig.cml")) self.assertCML([self.t[:, :, :]], ("cube_slice", "2d_orig.cml")) - self.assertCML( - [self.t[Ellipsis, 
Ellipsis]], ("cube_slice", "2d_orig.cml") - ) + self.assertCML([self.t[Ellipsis, Ellipsis]], ("cube_slice", "2d_orig.cml")) self.assertCML( [self.t[Ellipsis, Ellipsis, Ellipsis]], ("cube_slice", "2d_orig.cml"), @@ -798,9 +750,7 @@ def test_cube_extract_2d(self): ) def test_cube_extract_coord_which_does_not_exist(self): - self.assertEqual( - self.t.extract(iris.Constraint(doesnt_exist=8.1)), None - ) + self.assertEqual(self.t.extract(iris.Constraint(doesnt_exist=8.1)), None) def test_cube_extract_coord_with_non_existant_values(self): self.assertEqual(self.t.extract(iris.Constraint(dim1=8)), None) @@ -827,9 +777,7 @@ def test_cube_extract_by_coord_advanced(self): c = iris.coords.DimCoord( points, long_name="dim2", units="meters", bounds=bounds ) - self.assertCML( - self.t.subset(c), ("cube_slice", "2d_intersect_and_reverse.cml") - ) + self.assertCML(self.t.subset(c), ("cube_slice", "2d_intersect_and_reverse.cml")) @tests.skip_data @@ -1164,19 +1112,13 @@ def test_cube_empty_indexing(self): def test_real_data_cube_indexing(self): cube = self.cube[(0, 4, 5, 2), 0, 0] - self.assertCML( - cube, ("cube_slice", "real_data_dual_tuple_indexing1.cml") - ) + self.assertCML(cube, ("cube_slice", "real_data_dual_tuple_indexing1.cml")) cube = self.cube[0, (0, 4, 5, 2), (3, 5, 5)] - self.assertCML( - cube, ("cube_slice", "real_data_dual_tuple_indexing2.cml") - ) + self.assertCML(cube, ("cube_slice", "real_data_dual_tuple_indexing2.cml")) cube = self.cube[(0, 4, 5, 2), 0, (3, 5, 5)] - self.assertCML( - cube, ("cube_slice", "real_data_dual_tuple_indexing3.cml") - ) + self.assertCML(cube, ("cube_slice", "real_data_dual_tuple_indexing3.cml")) self.assertRaises( IndexError, @@ -1208,12 +1150,8 @@ def partial_compare(self, dual, single): single.name(), "dual and single stage standard_names differ", ) - self.assertEqual( - dual.units, single.units, "dual and single stage units differ" - ) - self.assertEqual( - dual.shape, single.shape, "dual and single stage shape differ" - ) + 
self.assertEqual(dual.units, single.units, "dual and single stage units differ") + self.assertEqual(dual.shape, single.shape, "dual and single stage shape differ") def collapse_test_common(self, cube, a_name, b_name, *args, **kwargs): # preserve filenames from before the introduction of "grid_" in rotated coord names. @@ -1270,12 +1208,8 @@ def test_multi_d(self): # Compare 2-stage collapsing with a single stage collapse # over 2 Coords. - self.collapse_test_common( - cube, "grid_latitude", "grid_longitude", rtol=1e-05 - ) - self.collapse_test_common( - cube, "grid_longitude", "grid_latitude", rtol=1e-05 - ) + self.collapse_test_common(cube, "grid_latitude", "grid_longitude", rtol=1e-05) + self.collapse_test_common(cube, "grid_longitude", "grid_latitude", rtol=1e-05) self.collapse_test_common(cube, "time", "grid_latitude", rtol=1e-05) self.collapse_test_common(cube, "grid_latitude", "time", rtol=1e-05) @@ -1297,19 +1231,11 @@ def test_multi_d(self): cube, "model_level_number", "grid_longitude", rtol=5e-04 ) - self.collapse_test_common( - cube, "time", "model_level_number", rtol=5e-04 - ) - self.collapse_test_common( - cube, "model_level_number", "time", rtol=5e-04 - ) + self.collapse_test_common(cube, "time", "model_level_number", rtol=5e-04) + self.collapse_test_common(cube, "model_level_number", "time", rtol=5e-04) - self.collapse_test_common( - cube, "model_level_number", "time", rtol=5e-04 - ) - self.collapse_test_common( - cube, "time", "model_level_number", rtol=5e-04 - ) + self.collapse_test_common(cube, "model_level_number", "time", rtol=5e-04) + self.collapse_test_common(cube, "time", "model_level_number", rtol=5e-04) # Collapse 3 things at once. 
triple_collapse = cube.collapsed( @@ -1318,7 +1244,7 @@ def test_multi_d(self): ) self.assertCMLApproxData( triple_collapse, - ("cube_collapsed", ("triple_collapse_ml_pt_" "lon.cml")), + ("cube_collapsed", ("triple_collapse_ml_pt_lon.cml")), rtol=5e-04, ) @@ -1327,7 +1253,7 @@ def test_multi_d(self): ) self.assertCMLApproxData( triple_collapse, - ("cube_collapsed", ("triple_collapse_lat_ml" "_pt.cml")), + ("cube_collapsed", ("triple_collapse_lat_ml_pt.cml")), rtol=0.05, ) # KNOWN PROBLEM: the previous 'rtol' is very large. @@ -1367,16 +1293,12 @@ def _load_3d_cube(self): # This 3D data set has a missing a slice with SOME missing values. # The missing data is in the pressure = 1000 hPa, forcast_period = 0, # time = 1970-02-11 16:00:00 slice. - return iris.load_cube( - tests.get_data_path(["PP", "mdi_handmade_small", "*.pp"]) - ) + return iris.load_cube(tests.get_data_path(["PP", "mdi_handmade_small", "*.pp"])) def test_complete_field(self): # This pp field has no missing data values cube = iris.load_cube( - tests.get_data_path( - ["PP", "mdi_handmade_small", "mdi_test_1000_3.pp"] - ) + tests.get_data_path(["PP", "mdi_handmade_small", "mdi_test_1000_3.pp"]) ) self.assertIsInstance(cube.data, np.ndarray) @@ -1384,9 +1306,7 @@ def test_complete_field(self): def test_masked_field(self): # This pp field has some missing data values cube = iris.load_cube( - tests.get_data_path( - ["PP", "mdi_handmade_small", "mdi_test_1000_0.pp"] - ) + tests.get_data_path(["PP", "mdi_handmade_small", "mdi_test_1000_0.pp"]) ) self.assertIsInstance(cube.data, ma.core.MaskedArray) @@ -1422,9 +1342,7 @@ def test_save_and_merge(self): masked_slice.data.fill_value = fill_value # test saving masked data - reference_txt_path = tests.get_result_path( - ("cdm", "masked_save_pp.txt") - ) + reference_txt_path = tests.get_result_path(("cdm", "masked_save_pp.txt")) with self.cube_save_test( reference_txt_path, reference_cubes=masked_slice ) as temp_pp_path: @@ -1438,9 +1356,7 @@ def 
test_save_and_merge(self): # make cube1 and cube2 differ on a scalar coord, to make them mergeable into a 3d cube cube2.coord("pressure").points = [1001.0] merged_cubes = iris.cube.CubeList([cube1, cube2]).merge() - self.assertEqual( - len(merged_cubes), 1, "expected a single merged cube" - ) + self.assertEqual(len(merged_cubes), 1, "expected a single merged cube") merged_cube = merged_cubes[0] self.assertEqual(merged_cube.dtype, dtype) # Check that the original masked-array fill-value is *ignored*. @@ -1468,9 +1384,7 @@ def test_coord_conversion(self): self.assertEqual(len(cube._as_list_of_coords([lat, lon])), 2) # Mix of string-like and coord - self.assertEqual( - len(cube._as_list_of_coords(["grid_latitude", lon])), 2 - ) + self.assertEqual(len(cube._as_list_of_coords(["grid_latitude", lon])), 2) # Empty list self.assertEqual(len(cube._as_list_of_coords([])), 0) diff --git a/lib/iris/tests/test_cell.py b/lib/iris/tests/test_cell.py index 3925d9b0a7..d5d11b8ee5 100644 --- a/lib/iris/tests/test_cell.py +++ b/lib/iris/tests/test_cell.py @@ -19,9 +19,7 @@ def setUp(self): def test_cell_from_coord(self): Cell = iris.coords.Cell - coord = iris.coords.AuxCoord( - np.arange(4) * 1.5, long_name="test", units="1" - ) + coord = iris.coords.AuxCoord(np.arange(4) * 1.5, long_name="test", units="1") self.assertEqual(Cell(point=0.0, bound=None), coord.cell(0)) self.assertEqual(Cell(point=1.5, bound=None), coord.cell(1)) self.assertEqual(Cell(point=4.5, bound=None), coord.cell(-1)) @@ -46,9 +44,7 @@ def test_cell_from_multidim_coord(self): bounds=np.arange(48).reshape(3, 4, 4), ) self.assertRaises(IndexError, coord.cell, 0) - self.assertEqual( - Cell(point=3, bound=(12, 13, 14, 15)), coord.cell((0, 3)) - ) + self.assertEqual(Cell(point=3, bound=(12, 13, 14, 15)), coord.cell((0, 3))) def test_mod(self): # Check that applying the mod function is not modifying the original @@ -69,9 +65,7 @@ def test_contains_point(self): self.assertTrue(c.contains_point(359.49951)) def 
test_pointless(self): - self.assertRaises( - ValueError, iris.coords.Cell, None, (359.49951, 359.5004) - ) + self.assertRaises(ValueError, iris.coords.Cell, None, (359.49951, 359.5004)) def test_add(self): # Check that applying the mod function is not modifying the original diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index 70f24478d2..79eacd7797 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -98,9 +98,7 @@ def test_auxiliary_coordinates_pass_0(self): ) def test_bounds_pass_0(self): - self.assertEqual( - sorted(self.cfr.cf_group.bounds.keys()), ["time_bnds"] - ) + self.assertEqual(sorted(self.cfr.cf_group.bounds.keys()), ["time_bnds"]) time_bnds = self.cfr.cf_group["time_bnds"] self.assertEqual(time_bnds.shape, (4, 2)) @@ -149,9 +147,7 @@ def test_coordinates_pass_0(self): self.assertEqual(time.cf_attrs(), tuple(attr)) def test_data_pass_0(self): - self.assertEqual( - sorted(self.cfr.cf_group.data_variables.keys()), ["pr"] - ) + self.assertEqual(sorted(self.cfr.cf_group.data_variables.keys()), ["pr"]) data = self.cfr.cf_group["pr"] self.assertEqual(data.shape, (4, 190, 174)) @@ -207,18 +203,10 @@ def test_global_attributes_pass_0(self): ], ) - self.assertEqual( - self.cfr.cf_group.global_attributes["Conventions"], "CF-1.0" - ) - self.assertEqual( - self.cfr.cf_group.global_attributes["experiment"], "ER3" - ) - self.assertEqual( - self.cfr.cf_group.global_attributes["institution"], "DMI" - ) - self.assertEqual( - self.cfr.cf_group.global_attributes["source"], "HIRHAM" - ) + self.assertEqual(self.cfr.cf_group.global_attributes["Conventions"], "CF-1.0") + self.assertEqual(self.cfr.cf_group.global_attributes["experiment"], "ER3") + self.assertEqual(self.cfr.cf_group.global_attributes["institution"], "DMI") + self.assertEqual(self.cfr.cf_group.global_attributes["source"], "HIRHAM") def test_variable_cf_group_pass_0(self): self.assertEqual( @@ -257,9 +245,7 @@ def test_variable_attribute_touch_pass_0(self): 
lat.cf_attrs_used(), (("long_name", "latitude"), ("units", "degrees_north")), ) - self.assertEqual( - lat.cf_attrs_unused(), (("standard_name", "latitude"),) - ) + self.assertEqual(lat.cf_attrs_unused(), (("standard_name", "latitude"),)) # clear the attribute touch history. lat.cf_attrs_reset() @@ -281,9 +267,7 @@ def test_destructor(self): """ with self.temp_filename(suffix=".nc") as fn: with open(fn, "wb+") as fh: - fh.write( - b"\x89HDF\r\n\x1a\nBroken file with correct signature" - ) + fh.write(b"\x89HDF\r\n\x1a\nBroken file with correct signature") fh.flush() with io.StringIO() as buf: @@ -366,9 +350,7 @@ def set_up(self): yield def test_bounds(self): - time = self.cfr.cf_group["temp_dmax_tmean_abs"].cf_group.coordinates[ - "time" - ] + time = self.cfr.cf_group["temp_dmax_tmean_abs"].cf_group.coordinates["time"] climatology = time.cf_group.climatology self.assertEqual(len(climatology), 1) self.assertEqual(list(climatology.keys()), ["climatology_bounds"]) @@ -404,30 +386,18 @@ def test_label_dim_start(self): cf_data_var = self.cfr_start.cf_group["temp_dmax_tmean_abs"] region_group = self.cfr_start.cf_group.labels["region_name"] - self.assertEqual( - sorted(self.cfr_start.cf_group.labels.keys()), ["region_name"] - ) - self.assertEqual( - sorted(cf_data_var.cf_group.labels.keys()), ["region_name"] - ) + self.assertEqual(sorted(self.cfr_start.cf_group.labels.keys()), ["region_name"]) + self.assertEqual(sorted(cf_data_var.cf_group.labels.keys()), ["region_name"]) - self.assertEqual( - region_group.cf_label_dimensions(cf_data_var), ("georegion",) - ) + self.assertEqual(region_group.cf_label_dimensions(cf_data_var), ("georegion",)) self.assertEqual(region_group.cf_label_data(cf_data_var)[0], "Anglian") cf_data_var = self.cfr_start.cf_group["cdf_temp_dmax_tmean_abs"] - self.assertEqual( - sorted(self.cfr_start.cf_group.labels.keys()), ["region_name"] - ) - self.assertEqual( - sorted(cf_data_var.cf_group.labels.keys()), ["region_name"] - ) + 
self.assertEqual(sorted(self.cfr_start.cf_group.labels.keys()), ["region_name"]) + self.assertEqual(sorted(cf_data_var.cf_group.labels.keys()), ["region_name"]) - self.assertEqual( - region_group.cf_label_dimensions(cf_data_var), ("georegion",) - ) + self.assertEqual(region_group.cf_label_dimensions(cf_data_var), ("georegion",)) self.assertEqual(region_group.cf_label_data(cf_data_var)[0], "Anglian") def test_label_dim_end(self): @@ -449,9 +419,7 @@ def test_label_dim_end(self): ("ensemble",), ) self.assertEqual( - self.cfr_end.cf_group.labels["experiment_id"].cf_label_data( - cf_data_var - )[0], + self.cfr_end.cf_group.labels["experiment_id"].cf_label_data(cf_data_var)[0], "2005", ) @@ -462,22 +430,16 @@ def test_label_dim_end(self): ("ensemble",), ) self.assertEqual( - self.cfr_end.cf_group.labels["institution"].cf_label_data( - cf_data_var - )[0], + self.cfr_end.cf_group.labels["institution"].cf_label_data(cf_data_var)[0], "ECMWF", ) self.assertEqual( - self.cfr_end.cf_group.labels["source"].cf_label_dimensions( - cf_data_var - ), + self.cfr_end.cf_group.labels["source"].cf_label_dimensions(cf_data_var), ("ensemble",), ) self.assertEqual( - self.cfr_end.cf_group.labels["source"].cf_label_data(cf_data_var)[ - 0 - ], + self.cfr_end.cf_group.labels["source"].cf_label_data(cf_data_var)[0], "IFS33R1/HOPE-E, Sys 1, Met 1, ENSEMBLES", ) diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index e3a1d2eaf3..e55d7d86fd 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -34,9 +34,7 @@ exclusion = ["Makefile", "build"] DOCS_DIRS = glob(os.path.join(DOCS_DIR, "*")) DOCS_DIRS = [ - DOC_DIR - for DOC_DIR in DOCS_DIRS - if os.path.basename(DOC_DIR) not in exclusion + DOC_DIR for DOC_DIR in DOCS_DIRS if os.path.basename(DOC_DIR) not in exclusion ] # Get a dirpath to the git repository : allow setting with an environment # variable, so Travis can test for headers in the repo, not the 
installation. @@ -90,21 +88,16 @@ def test_python_versions(): ( pyproject_toml_file, "\n ".join( - [ - f'"Programming Language :: Python :: {ver}",' - for ver in all_supported - ] + [f'"Programming Language :: Python :: {ver}",' for ver in all_supported] ), ), ( nox_file, - "_PY_VERSIONS_ALL = [" - + ", ".join([f'"{ver}"' for ver in all_supported]), + "_PY_VERSIONS_ALL = [" + ", ".join([f'"{ver}"' for ver in all_supported]), ), ( ci_wheels_file, - "python-version: [" - + ", ".join([f'"{ver}"' for ver in all_supported]), + "python-version: [" + ", ".join([f'"{ver}"' for ver in all_supported]), ), ( ci_tests_file, @@ -159,18 +152,14 @@ class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): file_text = file_path.read_text() parsed = ast.parse(source=file_text) calls = filter(lambda node: hasattr(node, "func"), ast.walk(parsed)) - warn_calls = filter( - lambda c: getattr(c.func, "attr", None) == "warn", calls - ) + warn_calls = filter(lambda c: getattr(c.func, "attr", None) == "warn", calls) warn_call: ast.Call for warn_call in warn_calls: warn_ref = f"{file_path}:{warn_call.lineno}" tmp_list.append(warn_ref) - category_kwargs = filter( - lambda k: k.arg == "category", warn_call.keywords - ) + category_kwargs = filter(lambda k: k.arg == "category", warn_call.keywords) category_kwarg: ast.keyword = next(category_kwargs, None) if category_kwarg is None: @@ -268,10 +257,7 @@ def test_license_headers(self): last_change_by_fname = self.last_change_by_fname() except ValueError as err: # Caught the case where this is not a git repo. - msg = ( - "Iris installation did not look like a git repo?" 
- "\nERR = {}\n\n" - ) + msg = "Iris installation did not look like a git repo?\nERR = {}\n\n" return self.skipTest(msg.format(str(err))) failed = False diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index 7d28d48c31..7957eca2ac 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -108,9 +108,7 @@ def _make_cube( payload = np.arange(y_size * x_size, dtype=np.float32).reshape( y_size, x_size ) - coord = AuxCoord( - payload * 100 + offset, long_name="xy-aux", units="1" - ) + coord = AuxCoord(payload * 100 + offset, long_name="xy-aux", units="1") cube.add_aux_coord(coord, (0, 1)) if cell_measure is not None: @@ -123,9 +121,7 @@ def _make_cube( cube.add_cell_measure(cm, (1,)) if "xy" in cell_measure: payload = x_range + y_range[:, np.newaxis] - cm = CellMeasure( - payload * 100 + offset, long_name="xy-aux", units="1" - ) + cm = CellMeasure(payload * 100 + offset, long_name="xy-aux", units="1") cube.add_cell_measure(cm, (0, 1)) if ancil is not None: @@ -159,9 +155,7 @@ def _make_cube( payload = np.arange(y_size * x_size, dtype=np.float32).reshape( y_size, x_size ) - orog = AuxCoord( - payload * 100 + offset, long_name="orog", units="m" - ) + orog = AuxCoord(payload * 100 + offset, long_name="orog", units="m") cube.add_aux_coord(orog, (0, 1)) else: raise NotImplementedError() @@ -253,25 +247,19 @@ def _make_cube_3d(x, y, z, data, aux=None, offset=0): payload = np.arange(x_size * z_size, dtype=np.float32).reshape( z_size, x_size ) - coord = AuxCoord( - payload * 10 + offset, long_name="xz-aux", units="1" - ) + coord = AuxCoord(payload * 10 + offset, long_name="xz-aux", units="1") cube.add_aux_coord(coord, (0, 2)) if "yz" in aux: payload = np.arange(y_size * z_size, dtype=np.float32).reshape( z_size, y_size ) - coord = AuxCoord( - payload * 100 + offset, long_name="yz-aux", units="1" - ) + coord = AuxCoord(payload * 100 + offset, long_name="yz-aux", units="1") cube.add_aux_coord(coord, (0, 1)) if 
"xyz" in aux: - payload = np.arange( - x_size * y_size * z_size, dtype=np.float32 - ).reshape(z_size, y_size, x_size) - coord = AuxCoord( - payload * 1000 + offset, long_name="xyz-aux", units="1" + payload = np.arange(x_size * y_size * z_size, dtype=np.float32).reshape( + z_size, y_size, x_size ) + coord = AuxCoord(payload * 1000 + offset, long_name="xyz-aux", units="1") cube.add_aux_coord(coord, (0, 1, 2)) return cube @@ -363,9 +351,7 @@ def test_bounds_overlap_increasing(self): y = (0, 2) cubes.append(_make_cube((0, 2), y, 1)) cube = _make_cube((2, 4), y, 1) - cube.coord("x").bounds = np.array( - [[0.5, 2.5], [2.5, 3.5]], dtype=np.float32 - ) + cube.coord("x").bounds = np.array([[0.5, 2.5], [2.5, 3.5]], dtype=np.float32) cubes.append(cube) with pytest.warns( IrisUserWarning, @@ -379,9 +365,7 @@ def test_bounds_overlap_decreasing(self): y = (0, 2) cubes.append(_make_cube((3, 1, -1), y, 1)) cube = _make_cube((1, -1, -1), y, 2) - cube.coord("x").bounds = np.array( - [[2.5, 0.5], [0.5, -0.5]], dtype=np.float32 - ) + cube.coord("x").bounds = np.array([[2.5, 0.5], [0.5, -0.5]], dtype=np.float32) cubes.append(cube) with pytest.warns( IrisUserWarning, @@ -783,9 +767,7 @@ def test_concat_2x2d_aux_xy_bounds(self): ).reshape(2, 2, 4) cubes.append(cube) result = concatenate(cubes) - self.assertCML( - result, ("concatenate", "concat_2x2d_aux_xy_bounds.cml") - ) + self.assertCML(result, ("concatenate", "concat_2x2d_aux_xy_bounds.cml")) self.assertEqual(len(result), 1) self.assertEqual(result[0].shape, (2, 4)) @@ -831,9 +813,9 @@ def test_concat_lazy_aux_coords(self): for cube in cubes: cube.data = cube.lazy_data() cube.coord("xy-aux").points = cube.coord("xy-aux").lazy_points() - bounds = da.arange( - 4 * cube.coord("xy-aux").core_points().size - ).reshape(cube.shape + (4,)) + bounds = da.arange(4 * cube.coord("xy-aux").core_points().size).reshape( + cube.shape + (4,) + ) cube.coord("xy-aux").bounds = bounds result = concatenate(cubes) @@ -958,17 +940,13 @@ def 
test_concat_scalar_4x2d_aux_xy(self): cubes.append(_make_cube((0, 2), y, 7, aux="xy", offset=3, scalar=20)) cubes.append(_make_cube((2, 4), y, 8, aux="xy", offset=4, scalar=20)) result = concatenate(cubes) - self.assertCML( - result, ("concatenate", "concat_scalar_4x2d_aux_xy.cml") - ) + self.assertCML(result, ("concatenate", "concat_scalar_4x2d_aux_xy.cml")) self.assertEqual(len(result), 2) for cube in result: self.assertEqual(cube.shape, (4, 4)) merged = result.merge() - self.assertCML( - merged, ("concatenate", "concat_merged_scalar_4x2d_aux_xy.cml") - ) + self.assertCML(merged, ("concatenate", "concat_merged_scalar_4x2d_aux_xy.cml")) self.assertEqual(len(merged), 1) self.assertEqual(merged[0].shape, (2, 4, 4)) @@ -980,9 +958,7 @@ def test_concat_scalar_4x2d_aux_xy(self): self.assertEqual(len(merged), 4) result = concatenate(merged) - self.assertCML( - result, ("concatenate", "concat_merged_scalar_4x2d_aux_xy.cml") - ) + self.assertCML(result, ("concatenate", "concat_merged_scalar_4x2d_aux_xy.cml")) self.assertEqual(len(result), 1) self.assertEqual(result[0].shape, (2, 4, 4)) @@ -1003,17 +979,13 @@ def test_concat_scalar_4y2d_aux_xy(self): cubes.append(_make_cube(x, (0, 2), 7, aux="xy", offset=3, scalar=20)) cubes.append(_make_cube(x, (2, 4), 8, aux="xy", offset=4, scalar=20)) result = concatenate(cubes) - self.assertCML( - result, ("concatenate", "concat_scalar_4y2d_aux_xy.cml") - ) + self.assertCML(result, ("concatenate", "concat_scalar_4y2d_aux_xy.cml")) self.assertEqual(len(result), 2) for cube in result: self.assertEqual(cube.shape, (4, 4)) merged = result.merge() - self.assertCML( - merged, ("concatenate", "concat_merged_scalar_4y2d_aux_xy.cml") - ) + self.assertCML(merged, ("concatenate", "concat_merged_scalar_4y2d_aux_xy.cml")) self.assertEqual(len(merged), 1) self.assertEqual(merged[0].shape, (2, 4, 4)) @@ -1026,41 +998,21 @@ def test_concat_scalar_4y2d_aux_xy(self): result = concatenate(merged) self.assertEqual(len(result), 1) - self.assertCML( - 
result, ("concatenate", "concat_merged_scalar_4y2d_aux_xy.cml") - ) + self.assertCML(result, ("concatenate", "concat_merged_scalar_4y2d_aux_xy.cml")) self.assertEqual(result[0].shape, (2, 4, 4)) def test_concat_scalar_4mix2d_aux_xy(self): cubes = iris.cube.CubeList() - cubes.append( - _make_cube((0, 2), (0, 2), 1, aux="xy", offset=1, scalar=10) - ) - cubes.append( - _make_cube((2, 4), (2, 4), 8, aux="xy", offset=4, scalar=20) - ) - cubes.append( - _make_cube((0, 2), (0, 2), 5, aux="xy", offset=1, scalar=20) - ) - cubes.append( - _make_cube((2, 4), (0, 2), 2, aux="xy", offset=2, scalar=10) - ) - cubes.append( - _make_cube((0, 2), (2, 4), 7, aux="xy", offset=3, scalar=20) - ) - cubes.append( - _make_cube((0, 2), (2, 4), 3, aux="xy", offset=3, scalar=10) - ) - cubes.append( - _make_cube((2, 4), (2, 4), 4, aux="xy", offset=4, scalar=10) - ) - cubes.append( - _make_cube((2, 4), (0, 2), 6, aux="xy", offset=2, scalar=20) - ) + cubes.append(_make_cube((0, 2), (0, 2), 1, aux="xy", offset=1, scalar=10)) + cubes.append(_make_cube((2, 4), (2, 4), 8, aux="xy", offset=4, scalar=20)) + cubes.append(_make_cube((0, 2), (0, 2), 5, aux="xy", offset=1, scalar=20)) + cubes.append(_make_cube((2, 4), (0, 2), 2, aux="xy", offset=2, scalar=10)) + cubes.append(_make_cube((0, 2), (2, 4), 7, aux="xy", offset=3, scalar=20)) + cubes.append(_make_cube((0, 2), (2, 4), 3, aux="xy", offset=3, scalar=10)) + cubes.append(_make_cube((2, 4), (2, 4), 4, aux="xy", offset=4, scalar=10)) + cubes.append(_make_cube((2, 4), (0, 2), 6, aux="xy", offset=2, scalar=20)) result = concatenate(cubes) - self.assertCML( - result, ("concatenate", "concat_scalar_4mix2d_aux_xy.cml") - ) + self.assertCML(result, ("concatenate", "concat_scalar_4mix2d_aux_xy.cml")) self.assertEqual(len(result), 2) for cube in result: self.assertEqual(cube.shape, (4, 4)) diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index b034525ff2..dbc7aa402f 100644 --- a/lib/iris/tests/test_constraints.py +++ 
b/lib/iris/tests/test_constraints.py @@ -44,9 +44,7 @@ def test_constraints(self): sub_list = self.slices.extract(constraint) self.assertEqual(len(sub_list), 2 * 6) - constraint = iris.Constraint( - model_level_number=lambda c: (c > 30) | (c <= 3) - ) + constraint = iris.Constraint(model_level_number=lambda c: (c > 30) | (c <= 3)) sub_list = self.slices.extract(constraint) self.assertEqual(len(sub_list), 43 * 6) @@ -118,9 +116,7 @@ class ConstraintMixin: """ def setUp(self): - self.dec_path = tests.get_data_path( - ["PP", "globClim1", "dec_subset.pp"] - ) + self.dec_path = tests.get_data_path(["PP", "globClim1", "dec_subset.pp"]) self.theta_path = tests.get_data_path(["PP", "globClim1", "theta.pp"]) self.humidity = iris.Constraint(SN_SPECIFIC_HUMIDITY) @@ -140,12 +136,8 @@ def setUp(self): ) # bound based coord constraint - self.level_height_of_model_level_number_10 = iris.Constraint( - level_height=1900 - ) - self.model_level_number_10_22 = iris.Constraint( - model_level_number=[10, 22] - ) + self.level_height_of_model_level_number_10 = iris.Constraint(level_height=1900) + self.model_level_number_10_22 = iris.Constraint(model_level_number=[10, 22]) # Invalid constraints self.pressure_950 = iris.Constraint(model_level_number=950) @@ -210,9 +202,7 @@ def test_string_standard_name(self): cubes = self.load_match( self.dec_path, - iris.Constraint( - SN_AIR_POTENTIAL_TEMPERATURE, model_level_number=10 - ), + iris.Constraint(SN_AIR_POTENTIAL_TEMPERATURE, model_level_number=10), ) self.fixup_sigma_to_be_aux(cubes) self.assertCML(cubes, "theta_10") @@ -277,9 +267,7 @@ def test_invalid_constraint(self): self.load_match(self.theta_path, self.pressure_950) def test_dual_atomic_constraint(self): - cubes = self.load_match( - self.dec_path, [self.theta, self.level_10 & self.theta] - ) + cubes = self.load_match(self.dec_path, [self.theta, self.level_10 & self.theta]) self.fixup_sigma_to_be_aux(cubes) self.assertCML(cubes, "theta_and_theta_10") @@ -413,9 +401,7 @@ def 
test_standard_name(self): def test_standard_name__None(self): cube = self.cubes[self.index] cube.standard_name = None - constraint = NameConstraint( - standard_name=None, long_name=self.long_name - ) + constraint = NameConstraint(standard_name=None, long_name=self.long_name) result = self.cubes.extract_cube(constraint) self.assertIsNotNone(result) self.assertIsNone(result.standard_name) @@ -435,8 +421,7 @@ def test_long_name(self): # Match - callable. kwargs = dict( - long_name=lambda item: item is not None - and item.startswith("air pot") + long_name=lambda item: item is not None and item.startswith("air pot") ) constraint = NameConstraint(**kwargs) result = self.cubes.extract_cube(constraint) @@ -446,9 +431,7 @@ def test_long_name(self): def test_long_name__None(self): cube = self.cubes[self.index] cube.long_name = None - constraint = NameConstraint( - standard_name=self.standard_name, long_name=None - ) + constraint = NameConstraint(standard_name=self.standard_name, long_name=None) result = self.cubes.extract_cube(constraint) self.assertIsNotNone(result) self.assertEqual(result.standard_name, self.standard_name) @@ -476,9 +459,7 @@ def test_var_name(self): def test_var_name__None(self): cube = self.cubes[self.index] cube.var_name = None - constraint = NameConstraint( - standard_name=self.standard_name, var_name=None - ) + constraint = NameConstraint(standard_name=self.standard_name, var_name=None) result = self.cubes.extract_cube(constraint) self.assertIsNotNone(result) self.assertEqual(result.standard_name, self.standard_name) @@ -505,9 +486,7 @@ def test_stash(self): def test_stash__None(self): cube = self.cubes[self.index] del cube.attributes["STASH"] - constraint = NameConstraint( - standard_name=self.standard_name, STASH=None - ) + constraint = NameConstraint(standard_name=self.standard_name, STASH=None) result = self.cubes.extract_cube(constraint) self.assertIsNotNone(result) self.assertEqual(result.standard_name, self.standard_name) @@ -679,15 +658,9 @@ 
def test_constraint_expressions(self): def test_string_repr(self): rt = repr(iris.Constraint(SN_AIR_POTENTIAL_TEMPERATURE)) - self.assertEqual( - rt, "Constraint(name='%s')" % SN_AIR_POTENTIAL_TEMPERATURE - ) + self.assertEqual(rt, "Constraint(name='%s')" % SN_AIR_POTENTIAL_TEMPERATURE) - rt = repr( - iris.Constraint( - SN_AIR_POTENTIAL_TEMPERATURE, model_level_number=10 - ) - ) + rt = repr(iris.Constraint(SN_AIR_POTENTIAL_TEMPERATURE, model_level_number=10)) self.assertEqual( rt, "Constraint(name='%s', coord_values={'model_level_number': 10})" @@ -724,9 +697,7 @@ def test_le_ge(self): self.run_test(function, numbers, results) def test_lt_gt(self): - function = iris.util.between( - 2, 4, rh_inclusive=False, lh_inclusive=False - ) + function = iris.util.between(2, 4, rh_inclusive=False, lh_inclusive=False) numbers = [1, 2, 3, 4, 5] results = [False, False, True, False, False] self.run_test(function, numbers, results) diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 5eb12ba1f3..70cbd15899 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -63,12 +63,8 @@ def test_multidim(self): bound_shape = a.shape + (2,) a.bounds = np.arange(np.prod(bound_shape)).reshape(bound_shape) b = a[(0, 2), (0, -1)] - np.testing.assert_array_equal( - b.points, a.points[(0, 2), :][:, (0, -1)] - ) - np.testing.assert_array_equal( - b.bounds, a.bounds[(0, 2), :, :][:, (0, -1), :] - ) + np.testing.assert_array_equal(b.points, a.points[(0, 2), :][:, (0, -1)]) + np.testing.assert_array_equal(b.bounds, a.bounds[(0, 2), :, :][:, (0, -1), :]) class TestCoordIntersection(tests.IrisTest): @@ -98,9 +94,7 @@ def setUp(self): ], dtype=np.float32, ) - self.b = iris.coords.AuxCoord( - pts, long_name="foo", units="meter", bounds=bnds - ) + self.b = iris.coords.AuxCoord(pts, long_name="foo", units="meter", bounds=bnds) def test_basic_intersection(self): inds = self.a.intersect(self.b, return_indices=True) @@ -151,12 +145,8 @@ def 
test_commutative(self): cube = iris.tests.stock.realistic_4d() coord = cube.coord("grid_longitude") offset_coord = coord.copy() - offset_coord = offset_coord - ( - offset_coord.points[20] - offset_coord.points[0] - ) - self.assertEqual( - coord.intersect(offset_coord), offset_coord.intersect(coord) - ) + offset_coord = offset_coord - (offset_coord.points[20] - offset_coord.points[0]) + self.assertEqual(coord.intersect(offset_coord), offset_coord.intersect(coord)) class TestXML(tests.IrisTest): @@ -186,14 +176,10 @@ def setUp(self): self.height = cube.coord("level_height")[:10] def test_DimCoord_repr(self): - self.assertRepr( - self.lat, ("coord_api", "str_repr", "dim_nontime_repr.txt") - ) + self.assertRepr(self.lat, ("coord_api", "str_repr", "dim_nontime_repr.txt")) def test_AuxCoord_repr(self): - self.assertRepr( - self.height, ("coord_api", "str_repr", "aux_nontime_repr.txt") - ) + self.assertRepr(self.height, ("coord_api", "str_repr", "aux_nontime_repr.txt")) def test_DimCoord_str(self): self.assertString( @@ -214,31 +200,21 @@ def setUp(self): self.fp = cube.coord("forecast_period") def test_DimCoord_repr(self): - self.assertRepr( - self.time, ("coord_api", "str_repr", "dim_time_repr.txt") - ) + self.assertRepr(self.time, ("coord_api", "str_repr", "dim_time_repr.txt")) def test_AuxCoord_repr(self): - self.assertRepr( - self.fp, ("coord_api", "str_repr", "aux_time_repr.txt") - ) + self.assertRepr(self.fp, ("coord_api", "str_repr", "aux_time_repr.txt")) def test_DimCoord_str(self): - self.assertString( - str(self.time), ("coord_api", "str_repr", "dim_time_str.txt") - ) + self.assertString(str(self.time), ("coord_api", "str_repr", "dim_time_str.txt")) def test_AuxCoord_str(self): - self.assertString( - str(self.fp), ("coord_api", "str_repr", "aux_time_str.txt") - ) + self.assertString(str(self.fp), ("coord_api", "str_repr", "aux_time_str.txt")) class TestAuxCoordCreation(tests.IrisTest): def test_basic(self): - a = iris.coords.AuxCoord( - np.arange(10), 
"air_temperature", units="kelvin" - ) + a = iris.coords.AuxCoord(np.arange(10), "air_temperature", units="kelvin") result = "\n".join( [ "AuxCoord : air_temperature / (kelvin)", @@ -250,9 +226,7 @@ def test_basic(self): ) self.assertEqual(result, str(a)) - b = iris.coords.AuxCoord( - list(range(10)), attributes={"monty": "python"} - ) + b = iris.coords.AuxCoord(list(range(10)), attributes={"monty": "python"}) result = "\n".join( [ "AuxCoord : unknown / (unknown)", @@ -274,9 +248,7 @@ def test_excluded_attributes(self): attributes={"standard_name": "whoopsy"}, ) - a = iris.coords.AuxCoord( - np.arange(10), "air_temperature", units="kelvin" - ) + a = iris.coords.AuxCoord(np.arange(10), "air_temperature", units="kelvin") with self.assertRaises(ValueError): a.attributes["standard_name"] = "whoopsy" with self.assertRaises(ValueError): @@ -341,9 +313,7 @@ def test_AuxCoord_fromcoord(self): class TestDimCoordCreation(tests.IrisTest): def test_basic(self): - a = iris.coords.DimCoord( - np.arange(10), "air_temperature", units="kelvin" - ) + a = iris.coords.DimCoord(np.arange(10), "air_temperature", units="kelvin") result = "\n".join( [ "DimCoord : air_temperature / (kelvin)", @@ -355,9 +325,7 @@ def test_basic(self): ) self.assertEqual(result, str(a)) - b = iris.coords.DimCoord( - list(range(10)), attributes={"monty": "python"} - ) + b = iris.coords.DimCoord(list(range(10)), attributes={"monty": "python"}) result = "\n".join( [ "DimCoord : unknown / (unknown)", @@ -379,9 +347,7 @@ def test_excluded_attributes(self): attributes={"standard_name": "whoopsy"}, ) - a = iris.coords.DimCoord( - np.arange(10), "air_temperature", units="kelvin" - ) + a = iris.coords.DimCoord(np.arange(10), "air_temperature", units="kelvin") with self.assertRaises(ValueError): a.attributes["standard_name"] = "whoopsy" with self.assertRaises(ValueError): @@ -447,9 +413,7 @@ def test_dim_coord_restrictions(self): # masked bounds emsg = "bounds array must not be masked" with 
self.assertRaisesRegex(TypeError, emsg): - iris.coords.DimCoord( - [1], bounds=ma.masked_array([[0, 2]], mask=True) - ) + iris.coords.DimCoord([1], bounds=ma.masked_array([[0, 2]], mask=True)) # shapes of points and bounds msg = "The shape of the 'unknown' DimCoord bounds array should be" with self.assertRaisesRegex(ValueError, msg): @@ -506,12 +470,8 @@ def test_DimCoord_from_regular(self): circular=False, ) - coord = iris.coords.DimCoord.from_regular( - zeroth, step, count, **kwargs - ) - expected_points = np.arange( - zeroth + step, zeroth + (count + 1) * step, step - ) + coord = iris.coords.DimCoord.from_regular(zeroth, step, count, **kwargs) + expected_points = np.arange(zeroth + step, zeroth + (count + 1) * step, step) expected = iris.coords.DimCoord(expected_points, **kwargs) self.assertIsInstance(coord, iris.coords.DimCoord) self.assertEqual(coord, expected) @@ -533,9 +493,7 @@ def test_DimCoord_from_regular_with_bounds(self): coord = iris.coords.DimCoord.from_regular( zeroth, step, count, with_bounds=True, **kwargs ) - expected_points = np.arange( - zeroth + step, zeroth + (count + 1) * step, step - ) + expected_points = np.arange(zeroth + step, zeroth + (count + 1) * step, step) expected_bounds = np.transpose( [expected_points - 0.5 * step, expected_points + 0.5 * step] ) @@ -596,9 +554,7 @@ def test_neg(self): self._build_coord(start=8) r_expl = -self.lon np.testing.assert_array_equal(r_expl.points, -(self.lon.points)) - self.assertXMLElement( - r_expl, ("coord_api", "coord_maths", "negate_expl.xml") - ) + self.assertXMLElement(r_expl, ("coord_api", "coord_maths", "negate_expl.xml")) def test_right_subtract(self): r_expl = 10 - self.lon @@ -809,9 +765,7 @@ def test_scalar(self): self.assertTrue(coord.is_contiguous()) def test_equal_int(self): - coord = iris.coords.DimCoord( - [0, 10, 20], bounds=[[0, 10], [10, 20], [20, 30]] - ) + coord = iris.coords.DimCoord([0, 10, 20], bounds=[[0, 10], [10, 20], [20, 30]]) self.assertTrue(coord.is_contiguous()) def 
test_equal_float(self): @@ -824,9 +778,7 @@ def test_guessed_bounds(self): delta = np.float64(0.00001) lower = -1.0 + delta upper = 3.0 - delta - points, step = np.linspace( - lower, upper, 2, endpoint=False, retstep=True - ) + points, step = np.linspace(lower, upper, 2, endpoint=False, retstep=True) points += step * 0.5 coord = iris.coords.DimCoord(points) coord.guess_bounds() @@ -1021,9 +973,7 @@ class Terry: self.assertIs(dim.__ne__(aux), NotImplemented) def test_climatological(self): - co1 = iris.coords.DimCoord( - [0], bounds=[[0, 1]], units="days since 1970-01-01" - ) + co1 = iris.coords.DimCoord([0], bounds=[[0, 1]], units="days since 1970-01-01") co2 = co1.copy() co2.climatological = True self.assertNotEqual(co1, co2) diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index e62a94f080..b7cf1aee93 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -64,9 +64,7 @@ class TestCoordSystemSame(tests.IrisTest): def setUp(self): self.cs1 = iris.coord_systems.GeogCS(6371229) self.cs2 = iris.coord_systems.GeogCS(6371229) - self.cs3 = iris.coord_systems.RotatedGeogCS( - 30, 30, ellipsoid=GeogCS(6371229) - ) + self.cs3 = iris.coord_systems.RotatedGeogCS(30, 30, ellipsoid=GeogCS(6371229)) def test_simple(self): a = self.cs1 @@ -101,9 +99,7 @@ def test_different_public_attributes(self): class Test_CoordSystem_xml_element(tests.IrisTest): def test_rotated(self): cs = RotatedGeogCS(30, 40, ellipsoid=GeogCS(6371229)) - self.assertXMLElement( - cs, ("coord_systems", "CoordSystem_xml_element.xml") - ) + self.assertXMLElement(cs, ("coord_systems", "CoordSystem_xml_element.xml")) class Test_GeogCS_construction(tests.IrisTest): @@ -115,20 +111,12 @@ def test_sphere_param(self): self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_sphere.xml")) def test_no_major(self): - cs = GeogCS( - semi_minor_axis=6500000, inverse_flattening=151.42814163388104 - ) - self.assertXMLElement( - cs, ("coord_systems", 
"GeogCS_init_no_major.xml") - ) + cs = GeogCS(semi_minor_axis=6500000, inverse_flattening=151.42814163388104) + self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_no_major.xml")) def test_no_minor(self): - cs = GeogCS( - semi_major_axis=6543210, inverse_flattening=151.42814163388104 - ) - self.assertXMLElement( - cs, ("coord_systems", "GeogCS_init_no_minor.xml") - ) + cs = GeogCS(semi_major_axis=6543210, inverse_flattening=151.42814163388104) + self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_no_minor.xml")) def test_no_invf(self): cs = GeogCS(semi_major_axis=6543210, semi_minor_axis=6500000) @@ -153,18 +141,14 @@ def test_invalid_ellipsoid_params(self): class Test_GeogCS_repr(tests.IrisTest): def test_repr(self): cs = GeogCS(6543210, 6500000) - expected = ( - "GeogCS(semi_major_axis=6543210.0, semi_minor_axis=6500000.0)" - ) + expected = "GeogCS(semi_major_axis=6543210.0, semi_minor_axis=6500000.0)" self.assertEqual(expected, repr(cs)) class Test_GeogCS_str(tests.IrisTest): def test_str(self): cs = GeogCS(6543210, 6500000) - expected = ( - "GeogCS(semi_major_axis=6543210.0, semi_minor_axis=6500000.0)" - ) + expected = "GeogCS(semi_major_axis=6543210.0, semi_minor_axis=6500000.0)" self.assertEqual(expected, str(cs)) @@ -359,14 +343,10 @@ def test_init(self): self.assertXMLElement(rcs, ("coord_systems", "RotatedGeogCS_init.xml")) rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50) - self.assertXMLElement( - rcs, ("coord_systems", "RotatedGeogCS_init_a.xml") - ) + self.assertXMLElement(rcs, ("coord_systems", "RotatedGeogCS_init_a.xml")) rcs = RotatedGeogCS(30, 40) - self.assertXMLElement( - rcs, ("coord_systems", "RotatedGeogCS_init_b.xml") - ) + self.assertXMLElement(rcs, ("coord_systems", "RotatedGeogCS_init_b.xml")) class Test_RotatedGeogCS_repr(tests.IrisTest): @@ -418,9 +398,7 @@ def test_str(self): class Test_TransverseMercator_construction(tests.IrisTest): def test_osgb(self): tm = osgb() - self.assertXMLElement( - tm, ("coord_systems", 
"TransverseMercator_osgb.xml") - ) + self.assertXMLElement(tm, ("coord_systems", "TransverseMercator_osgb.xml")) class Test_TransverseMercator_repr(tests.IrisTest): @@ -441,9 +419,7 @@ def test_as_cartopy_crs(self): false_easting = -40000.0 false_northing = 10000.0 scale_factor_at_central_meridian = 0.9996012717 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) tmerc_cs = TransverseMercator( latitude_of_projection_origin, @@ -478,9 +454,7 @@ def test_as_cartopy_projection(self): false_easting = -40000.0 false_northing = 10000.0 scale_factor_at_central_meridian = 0.9996012717 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) tmerc_cs = TransverseMercator( latitude_of_projection_origin, @@ -553,9 +527,7 @@ def test_default_none(self): def test_set_persist(self): cs = GeogCS.from_datum(datum="WGS84") cartopy_crs = cs.as_cartopy_crs() - self.assertMultiLineEqual( - cartopy_crs.datum.name, "World Geodetic System 1984" - ) + self.assertMultiLineEqual(cartopy_crs.datum.name, "World Geodetic System 1984") cs = GeogCS.from_datum(datum="OSGB36") cartopy_crs = cs.as_cartopy_crs() diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py index 1a6be27f9c..372c4c7ac9 100644 --- a/lib/iris/tests/test_cube_to_pp.py +++ b/lib/iris/tests/test_cube_to_pp.py @@ -88,9 +88,7 @@ def test_pp_save_rules(self): in_filename = tests.get_data_path(("PP", "simple_pp", "global.pp")) cubes = iris.load(in_filename, callback=itab_callback) - reference_txt_path = tests.get_result_path( - ("cube_to_pp", "simple.txt") - ) + reference_txt_path = tests.get_result_path(("cube_to_pp", "simple.txt")) with self.cube_save_test( reference_txt_path, reference_cubes=cubes ) as temp_pp_path: @@ -102,18 +100,14 @@ def test_pp_append_singles(self): # load 2 arrays 
of >2D cubes cube = stock.simple_pp() - reference_txt_path = tests.get_result_path( - ("cube_to_pp", "append_single.txt") - ) + reference_txt_path = tests.get_result_path(("cube_to_pp", "append_single.txt")) with self.cube_save_test( reference_txt_path, reference_cubes=[cube, cube] ) as temp_pp_path: iris.save(cube, temp_pp_path) # Create file iris.save(cube, temp_pp_path, append=True) # Append to file - reference_txt_path = tests.get_result_path( - ("cube_to_pp", "replace_single.txt") - ) + reference_txt_path = tests.get_result_path(("cube_to_pp", "replace_single.txt")) with self.cube_save_test( reference_txt_path, reference_cubes=cube ) as temp_pp_path: @@ -127,18 +121,14 @@ def test_pp_append_lists(self): cube_4d = stock.realistic_4d() cubes = [cube_4d[i, :2, :, :] for i in range(4)] - reference_txt_path = tests.get_result_path( - ("cube_to_pp", "append_multi.txt") - ) + reference_txt_path = tests.get_result_path(("cube_to_pp", "append_multi.txt")) with self.cube_save_test( reference_txt_path, reference_cubes=cubes ) as temp_pp_path: iris.save(cubes[:2], temp_pp_path) iris.save(cubes[2:], temp_pp_path, append=True) - reference_txt_path = tests.get_result_path( - ("cube_to_pp", "replace_multi.txt") - ) + reference_txt_path = tests.get_result_path(("cube_to_pp", "replace_multi.txt")) with self.cube_save_test( reference_txt_path, reference_cubes=cubes[2:] ) as temp_pp_path: @@ -168,9 +158,7 @@ def add_coords_to_cube_and_test(self, coord1, coord2): reference_cubes=cm, field_coords=[coord1.name(), coord2.name()], ) as temp_pp_path: - iris.save( - cm, temp_pp_path, field_coords=[coord1.name(), coord2.name()] - ) + iris.save(cm, temp_pp_path, field_coords=[coord1.name(), coord2.name()]) # test with coord with self.cube_save_test( reference_txt_path, @@ -203,9 +191,7 @@ def test_non_standard_cross_sections(self): ) self.add_coords_to_cube_and_test( - iris.coords.DimCoord( - f.z, long_name="depth", units="m", bounds=f.z_bounds - ), + iris.coords.DimCoord(f.z, 
long_name="depth", units="m", bounds=f.z_bounds), iris.coords.DimCoord( f.y, standard_name="latitude", @@ -216,9 +202,7 @@ def test_non_standard_cross_sections(self): ) self.add_coords_to_cube_and_test( - iris.coords.DimCoord( - f.z, long_name="eta", units="1", bounds=f.z_bounds - ), + iris.coords.DimCoord(f.z, long_name="eta", units="1", bounds=f.z_bounds), iris.coords.DimCoord( f.y, standard_name="latitude", @@ -314,13 +298,9 @@ def test_default_coord_system(self): pp_cube.coord_system(GeogCS), iris.coord_systems.GeogCS ) self.assertIsNotNone(pp_cube.coord_system(None)) - self.assertIsInstance( - pp_cube.coord_system(None), iris.coord_systems.GeogCS - ) + self.assertIsInstance(pp_cube.coord_system(None), iris.coord_systems.GeogCS) self.assertIsNotNone(pp_cube.coord_system()) - self.assertIsInstance( - pp_cube.coord_system(), iris.coord_systems.GeogCS - ) + self.assertIsInstance(pp_cube.coord_system(), iris.coord_systems.GeogCS) def lbproc_from_pp(self, filename): # Gets the lbproc field from the ppfile @@ -384,9 +364,7 @@ def test_lbvc(self): expected = ([2, 1, -5.0], [2, 2, -10.0], [2, 3, -15.0], [2, 4, -20.0]) - for field, (lbvc, lblev, blev) in zip( - fields_from_cube(cube), expected - ): + for field, (lbvc, lblev, blev) in zip(fields_from_cube(cube), expected): self.assertEqual(field.lbvc, lbvc) self.assertEqual(field.lblev, lblev) self.assertEqual(field.blev, blev) diff --git a/lib/iris/tests/test_ff.py b/lib/iris/tests/test_ff.py index 1abfafdac1..45d059a332 100644 --- a/lib/iris/tests/test_ff.py +++ b/lib/iris/tests/test_ff.py @@ -80,9 +80,7 @@ def test_constructor(self): ) self.assertEqual(self.ff_header.integer_constants.shape, (46,)) self.assertEqual(self.ff_header.real_constants.shape, (38,)) - self.assertEqual( - self.ff_header.level_dependent_constants.shape, (71, 8) - ) + self.assertEqual(self.ff_header.level_dependent_constants.shape, (71, 8)) self.assertIsNone(self.ff_header.row_dependent_constants) 
self.assertIsNone(self.ff_header.column_dependent_constants) self.assertIsNone(self.ff_header.fields_of_constants) @@ -119,9 +117,7 @@ def test_unit_pass_0(self): cube = cubes.pop(0) standard_name = cube.standard_name cube_by_name[standard_name] += 1 - filename = "{}_{}.cml".format( - standard_name, cube_by_name[standard_name] - ) + filename = "{}_{}.cml".format(standard_name, cube_by_name[standard_name]) self.assertCML(cube, ("FF", filename)) def test_raw_to_table_count(self): @@ -195,12 +191,12 @@ def _check_stash(self, stash, x_coord, y_coord): self.assertArrayEqual( x_coord, field.x, - ("x_coord was incorrect for " "stash {}".format(stash)), + ("x_coord was incorrect for stash {}".format(stash)), ) self.assertArrayEqual( y_coord, field.y, - ("y_coord was incorrect for " "stash {}".format(stash)), + ("y_coord was incorrect for stash {}".format(stash)), ) def test_p(self): diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index dc901db715..918b0690b3 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -42,9 +42,7 @@ def save_by_filename(filename1, filename2, cube, saver_fn, iosaver=None): ) # Optional iris.io.find_saver passed in from test -def save_by_filehandle( - filehandle1, filehandle2, cube, fn_saver, binary_mode=True -): +def save_by_filehandle(filehandle1, filehandle2, cube, fn_saver, binary_mode=True): """Saves a cube to two different filehandles using iris.save and the save method of the object representing the file type directly""" mode = "wb" if binary_mode else "w" @@ -86,9 +84,7 @@ class TestSavePP(TestSaveMethods): def test_filename(self): # Save using iris.save and pp.save - save_by_filename( - self.temp_filename1, self.temp_filename2, self.cube1, pp.save - ) + save_by_filename(self.temp_filename1, self.temp_filename2, self.cube1, pp.save) # Compare files self.assertEqual( @@ -149,9 +145,7 @@ class TestSaveDot(TestSaveMethods): def test_filename(self): # Save using iris.save and 
dot.save - save_by_filename( - self.temp_filename1, self.temp_filename2, self.cube1, dot.save - ) + save_by_filename(self.temp_filename1, self.temp_filename2, self.cube1, dot.save) # Compare files self.assertEqual( @@ -203,8 +197,7 @@ def test_bytesio(self): self.assertEqual( data, sio.getvalue(), - "Mismatch in data when comparing iris bytesio save " - "and dot.save.", + "Mismatch in data when comparing iris bytesio save and dot.save.", ) diff --git a/lib/iris/tests/test_hybrid.py b/lib/iris/tests/test_hybrid.py index e3e5076650..d45730e4f8 100644 --- a/lib/iris/tests/test_hybrid.py +++ b/lib/iris/tests/test_hybrid.py @@ -35,9 +35,7 @@ def test_metadata(self): self.assertEqual(self.altitude.attributes, {"positive": "up"}) def test_points(self): - self.assertAlmostEqual( - self.altitude.points.min(), np.float32(191.84892) - ) + self.assertAlmostEqual(self.altitude.points.min(), np.float32(191.84892)) self.assertAlmostEqual(self.altitude.points.max(), np.float32(40000)) def test_transpose(self): @@ -65,11 +63,7 @@ def test_removing_sigma(self): # Check the factory now only has surface_altitude and delta dependencies. factory = cube.aux_factory(name="altitude") - t = [ - key - for key, coord in factory.dependencies.items() - if coord is not None - ] + t = [key for key, coord in factory.dependencies.items() if coord is not None] self.assertCountEqual(t, ["orography", "delta"]) def test_removing_orography(self): @@ -81,11 +75,7 @@ def test_removing_orography(self): # Check the factory now only has sigma and delta dependencies. 
factory = cube.aux_factory(name="altitude") - t = [ - key - for key, coord in factory.dependencies.items() - if coord is not None - ] + t = [key for key, coord in factory.dependencies.items() if coord is not None] self.assertCountEqual(t, ["sigma", "delta"]) def test_derived_coords(self): @@ -216,9 +206,7 @@ def test_invalid_dependencies(self): # Cause all warnings to raise Exceptions warnings.simplefilter("error") with self.assertRaises(IrisIgnoringBoundsWarning): - _ = HybridPressureFactory( - sigma=sigma, surface_air_pressure=sigma - ) + _ = HybridPressureFactory(sigma=sigma, surface_air_pressure=sigma) def test_bounded_surface_pressure(self): # Start with everything normal diff --git a/lib/iris/tests/test_imports.py b/lib/iris/tests/test_imports.py index 46c7cae723..55595d48b4 100644 --- a/lib/iris/tests/test_imports.py +++ b/lib/iris/tests/test_imports.py @@ -39,13 +39,9 @@ def itree(path, prefix=None, debug=False): print(emsg.format(package)) raise for child in children: - parent = ( - f"{prefix}.{child.stem}" if child.is_dir() else prefix - ) + parent = f"{prefix}.{child.stem}" if child.is_dir() else prefix TestImports.itree(child, prefix=parent, debug=debug) - elif ( - path.is_file() and path.name not in IGNORE and path.suffix == ".py" - ): + elif path.is_file() and path.name not in IGNORE and path.suffix == ".py": package = f"{prefix}.{path.stem}" if debug: print(f"import {package}") diff --git a/lib/iris/tests/test_intersect.py b/lib/iris/tests/test_intersect.py index 29603f61a8..b77b63b549 100644 --- a/lib/iris/tests/test_intersect.py +++ b/lib/iris/tests/test_intersect.py @@ -54,9 +54,7 @@ def test_simple_intersect(self): 0, ) cube.add_aux_coord( - iris.coords.DimCoord( - points=np.int32(11), long_name="pressure", units="Pa" - ) + iris.coords.DimCoord(points=np.int32(11), long_name="pressure", units="Pa") ) cube.rename("temperature") cube.units = "K" @@ -94,9 +92,7 @@ def test_simple_intersect(self): 0, ) cube2.add_aux_coord( - iris.coords.DimCoord( - 
points=np.int32(11), long_name="pressure", units="Pa" - ) + iris.coords.DimCoord(points=np.int32(11), long_name="pressure", units="Pa") ) cube2.rename("") diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index 57b6c2a963..bb0f5c0d17 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -28,9 +28,7 @@ def test_decode_uri__str(self): "file", uri, ), - ( - uri := "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp" - ): ( + (uri := "file:///data/local/someDir/PP/COLPEX/COLPEX_16a_pj001.pp"): ( uri[:4], uri[5:], ), diff --git a/lib/iris/tests/test_iterate.py b/lib/iris/tests/test_iterate.py index 1bee6db74f..bd185d1232 100644 --- a/lib/iris/tests/test_iterate.py +++ b/lib/iris/tests/test_iterate.py @@ -97,9 +97,7 @@ def test_izip_missing_slice_coords(self): self.cube_b.coord("grid_latitude").rename("latitude") self.cube_b.coord("grid_longitude").rename("longitude") with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - iris.iterate.izip( - self.cube_a, self.cube_b, coords=self.coord_names - ) + iris.iterate.izip(self.cube_a, self.cube_b, coords=self.coord_names) def test_izip_onecube_no_coords(self): # Should do the same as slices() but bearing in mind izip.next() @@ -222,9 +220,7 @@ def test_izip_subcube_of_same(self): for super_slice, sub_slice in iris.iterate.izip( super_cube, sub_cube, coords=self.coord_names ): - self.assertEqual( - sub_slice, sub_cube - ) # This cube should not change + self.assertEqual(sub_slice, sub_cube) # This cube should not change # as lat and long are the only # data dimensions in this cube) self.assertEqual(super_slice, next(super_slice_iterator)) @@ -338,9 +334,7 @@ def test_izip_extra_dim(self): i, j = next(ij_iterator) if random.random() < check_eq_probability: big_slice_truth = big_cube[i, :, j] - little_slice_truth = ( - little_cube # Just 1d so slice is entire cube - ) + little_slice_truth = little_cube # Just 1d so slice is entire cube 
self.assertEqual(little_slice_truth, little_slice) self.assertEqual(big_slice_truth, big_slice) count += 1 @@ -362,13 +356,9 @@ def test_izip_different_valued_coords(self): longitude = self.cube_b.coord("grid_longitude") # Same coord metadata and shape, but different values - check it produces a warning with warnings.catch_warnings(): - warnings.simplefilter( - "error" - ) # Cause all warnings to raise Exceptions + warnings.simplefilter("error") # Cause all warnings to raise Exceptions with self.assertRaises(IrisUserWarning): - iris.iterate.izip( - self.cube_a, self.cube_b, coords=self.coord_names - ) + iris.iterate.izip(self.cube_a, self.cube_b, coords=self.coord_names) # Call with coordinates, rather than names with self.assertRaises(IrisUserWarning): iris.iterate.izip( @@ -455,9 +445,7 @@ def test_izip_use_in_analysis(self): def test_izip_nd_non_ortho(self): cube1 = iris.cube.Cube(np.zeros((5, 5, 5))) - cube1.add_aux_coord( - iris.coords.AuxCoord(np.arange(5), long_name="z"), [0] - ) + cube1.add_aux_coord(iris.coords.AuxCoord(np.arange(5), long_name="z"), [0]) cube1.add_aux_coord( iris.coords.AuxCoord(np.arange(25).reshape(5, 5), long_name="y"), [1, 2], @@ -475,9 +463,7 @@ def test_izip_nd_non_ortho(self): def test_izip_nd_ortho(self): cube1 = iris.cube.Cube(np.zeros((5, 5, 5, 5, 5), dtype="f8")) cube1.add_dim_coord( - iris.coords.DimCoord( - np.arange(5, dtype="i8"), long_name="z", units="1" - ), + iris.coords.DimCoord(np.arange(5, dtype="i8"), long_name="z", units="1"), [0], ) cube1.add_aux_coord( @@ -567,9 +553,7 @@ def test_izip_no_common_coords_on_step_dim(self): # izip should step through them as a product. 
slice_a_iterator = self.cube_a.slices(self.coord_names) slice_b_iterator = self.cube_b.slices(self.coord_names) - product_iterator = itertools.product( - slice_a_iterator, slice_b_iterator - ) + product_iterator = itertools.product(slice_a_iterator, slice_b_iterator) nslices = self.cube_a.shape[0] * self.cube_b.shape[0] count = 0 for slice_a, slice_b in iris.iterate.izip( diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py index 690198c25a..908ed90bf8 100644 --- a/lib/iris/tests/test_lazy_aggregate_by.py +++ b/lib/iris/tests/test_lazy_aggregate_by.py @@ -16,14 +16,10 @@ def setUp(self): self.cube_single.data = as_lazy_data(self.cube_single.data) self.cube_multi.data = as_lazy_data(self.cube_multi.data) - self.cube_single_masked.data = as_lazy_data( - self.cube_single_masked.data - ) + self.cube_single_masked.data = as_lazy_data(self.cube_single_masked.data) self.cube_multi_masked.data = as_lazy_data(self.cube_multi_masked.data) self.cube_easy.data = as_lazy_data(self.cube_easy.data) - self.cube_easy_weighted.data = as_lazy_data( - self.cube_easy_weighted.data - ) + self.cube_easy_weighted.data = as_lazy_data(self.cube_easy_weighted.data) assert self.cube_single.has_lazy_data() assert self.cube_multi.has_lazy_data() @@ -48,9 +44,7 @@ class TestLazyAggregateByWeightedByCube(TestLazyAggregateBy): def setUp(self): super().setUp() - self.weights_single = self.cube_single[:, 0, 0].copy( - self.weights_single - ) + self.weights_single = self.cube_single[:, 0, 0].copy(self.weights_single) self.weights_single.units = "m2" self.weights_multi = self.cube_multi[:, 0, 0].copy(self.weights_multi) self.weights_multi.units = "m2" diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 93ea4ef913..61de385cf1 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -26,9 +26,7 @@ def test_normal(self): self.assertEqual(len(cubes), 1) def test_path_object(self): - paths = ( - 
pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), - ) + paths = (pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])),) cubes = iris.load(paths) self.assertEqual(len(cubes), 1) @@ -82,9 +80,7 @@ def test_normal(self): _ = iris.load_cube(paths) def test_path_object(self): - paths = ( - pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), - ) + paths = (pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])),) _ = iris.load_cube(paths) def test_not_enough(self): @@ -109,9 +105,7 @@ def test_normal(self): self.assertEqual(len(cubes), 1) def test_path_object(self): - paths = ( - pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), - ) + paths = (pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])),) cubes = iris.load_cubes(paths) self.assertEqual(len(cubes), 1) @@ -142,9 +136,7 @@ def test_normal(self): self.assertEqual(len(cubes), 1) def test_path_object(self): - paths = ( - pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])), - ) + paths = (pathlib.Path(tests.get_data_path(["PP", "aPPglob1", "global.pp"])),) cubes = iris.load_raw(paths) self.assertEqual(len(cubes), 1) diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 6ea4571630..bf17f0ca6d 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -104,9 +104,7 @@ def test_simple(self): class TestMappingSubRegion(tests.GraphicsTest): def setUp(self): super().setUp() - cube_path = tests.get_data_path( - ("PP", "aPProt1", "rotatedMHtimecube.pp") - ) + cube_path = tests.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp")) cube = iris.load_cube(cube_path)[0] # make the data smaller to speed things up. 
self.cube = cube[::10, ::10] @@ -159,9 +157,7 @@ def setUp(self): self.cube = iris.tests.stock.global_pp() self.few = 4 self.few_levels = list(range(280, 300, 5)) - self.many_levels = np.linspace( - self.cube.data.min(), self.cube.data.max(), 40 - ) + self.many_levels = np.linspace(self.cube.data.min(), self.cube.data.max(), 40) def test_simple(self): iplt.contour(self.cube) @@ -220,9 +216,7 @@ def test_default_projection_and_extent(self): [0.0, 360.0, -89.99995422, 89.99998474], ) np_testing.assert_array_almost_equal( - iplt.default_projection_extent( - self.cube, mode=iris.coords.BOUND_MODE - ), + iplt.default_projection_extent(self.cube, mode=iris.coords.BOUND_MODE), [-1.875046, 358.124954, -90, 90], ) diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index 7c11fde55d..9a92334c5e 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -48,9 +48,7 @@ def test_remerge(self): def test_duplication(self): cubes = iris.load(self._data_path) - self.assertRaises( - iris.exceptions.DuplicateDataError, (cubes + cubes).merge - ) + self.assertRaises(iris.exceptions.DuplicateDataError, (cubes + cubes).merge) cubes2 = (cubes + cubes).merge(unique=False) self.assertEqual(len(cubes2), 2 * len(cubes)) @@ -66,9 +64,7 @@ def setUp(self): @tests.skip_data class TestMultiCube(tests.IrisTest, MergeMixin): def setUp(self): - self._data_path = tests.get_data_path( - ("PP", "globClim1", "dec_subset.pp") - ) + self._data_path = tests.get_data_path(("PP", "globClim1", "dec_subset.pp")) self._num_cubes = 4 self._prefix = "dec" @@ -169,9 +165,7 @@ def _check_fill_value(self, result, fill0="none", fill1="none"): if expected_fill_value is None: data = result.data if ma.isMaskedArray(data): - np_fill_value = ma.masked_array( - 0, dtype=result.dtype - ).fill_value + np_fill_value = ma.masked_array(0, dtype=result.dtype).fill_value self.assertEqual(data.fill_value, np_fill_value) else: data = result.data @@ -185,9 +179,7 @@ def setUp(self): 
fill_combos = itertools.product([None, fill_value], [fill_value, None]) single_fill_combos = itertools.product([None, fill_value]) self.combos = itertools.product(self.lazy_combos, fill_combos) - self.mixed_combos = itertools.product( - self.lazy_combos, single_fill_combos - ) + self.mixed_combos = itertools.product(self.lazy_combos, single_fill_combos) def test__ndarray_ndarray(self): for lazy0, lazy1 in self.lazy_combos: @@ -308,18 +300,13 @@ def test_maksed_array_preserved(self): def test_fill_value_invariant_to_order__same_non_None(self): fill_value = 1234 - cubes = [ - self._make_cube(i, mask=True, fill_value=fill_value) - for i in range(3) - ] + cubes = [self._make_cube(i, mask=True, fill_value=fill_value) for i in range(3)] for combo in itertools.permutations(cubes): result = iris.cube.CubeList(combo).merge_cube() self.assertEqual(result.data.fill_value, fill_value) def test_fill_value_invariant_to_order__all_None(self): - cubes = [ - self._make_cube(i, mask=True, fill_value=None) for i in range(3) - ] + cubes = [self._make_cube(i, mask=True, fill_value=None) for i in range(3)] for combo in itertools.permutations(cubes): result = iris.cube.CubeList(combo).merge_cube() np_fill_value = ma.masked_array(0, dtype=result.dtype).fill_value @@ -349,9 +336,7 @@ def test_fill_value_invariant_to_order__mixed(self): class TestDataMerge(tests.IrisTest): def test_extended_proxy_data(self): # Get the empty theta cubes for T+1.5 and T+2 - data_path = tests.get_data_path( - ("PP", "COLPEX", "theta_and_orog_subset.pp") - ) + data_path = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) phenom_constraint = iris.Constraint("air_potential_temperature") datetime_1 = datetime.datetime(2009, 9, 9, 17, 20) datetime_2 = datetime.datetime(2009, 9, 9, 17, 50) @@ -368,9 +353,7 @@ def test_extended_proxy_data(self): self.assertCML(cubes, ("merge", "theta_two_times.cml")) # Make sure we get the same result directly from load - cubes = iris.load_cube( - data_path, 
phenom_constraint & time_constraint_1_and_2 - ) + cubes = iris.load_cube(data_path, phenom_constraint & time_constraint_1_and_2) self.assertCML(cubes, ("merge", "theta_two_times.cml")) def test_real_data(self): @@ -471,9 +454,7 @@ def _make_cube(self, a, b, c, d, data=0): for name, value in zip(["a", "b", "c", "d"], [a, b, c, d]): dtype = np.str_ if isinstance(value, str) else np.float32 cube.add_aux_coord( - AuxCoord( - np.array([value], dtype=dtype), long_name=name, units="1" - ) + AuxCoord(np.array([value], dtype=dtype), long_name=name, units="1") ) return cube @@ -481,19 +462,13 @@ def _make_cube(self, a, b, c, d, data=0): def test_separable_combination(self): cubes = iris.cube.CubeList() cubes.append( - self._make_cube( - "2005", "ECMWF", "HOPE-E, Sys 1, Met 1, ENSEMBLES", 0 - ) + self._make_cube("2005", "ECMWF", "HOPE-E, Sys 1, Met 1, ENSEMBLES", 0) ) cubes.append( - self._make_cube( - "2005", "ECMWF", "HOPE-E, Sys 1, Met 1, ENSEMBLES", 1 - ) + self._make_cube("2005", "ECMWF", "HOPE-E, Sys 1, Met 1, ENSEMBLES", 1) ) cubes.append( - self._make_cube( - "2005", "ECMWF", "HOPE-E, Sys 1, Met 1, ENSEMBLES", 2 - ) + self._make_cube("2005", "ECMWF", "HOPE-E, Sys 1, Met 1, ENSEMBLES", 2) ) cubes.append( self._make_cube( @@ -511,34 +486,22 @@ def test_separable_combination(self): ) ) cubes.append( - self._make_cube( - "2002", "CERFACS", "GELATO, Sys 0, Met 1, ENSEMBLES", 0 - ) + self._make_cube("2002", "CERFACS", "GELATO, Sys 0, Met 1, ENSEMBLES", 0) ) cubes.append( - self._make_cube( - "2002", "CERFACS", "GELATO, Sys 0, Met 1, ENSEMBLES", 1 - ) + self._make_cube("2002", "CERFACS", "GELATO, Sys 0, Met 1, ENSEMBLES", 1) ) cubes.append( - self._make_cube( - "2002", "CERFACS", "GELATO, Sys 0, Met 1, ENSEMBLES", 2 - ) + self._make_cube("2002", "CERFACS", "GELATO, Sys 0, Met 1, ENSEMBLES", 2) ) cubes.append( - self._make_cube( - "2002", "IFM-GEOMAR", "ECHAM5, Sys 1, Met 10, ENSEMBLES", 0 - ) + self._make_cube("2002", "IFM-GEOMAR", "ECHAM5, Sys 1, Met 10, ENSEMBLES", 0) ) 
cubes.append( - self._make_cube( - "2002", "IFM-GEOMAR", "ECHAM5, Sys 1, Met 10, ENSEMBLES", 1 - ) + self._make_cube("2002", "IFM-GEOMAR", "ECHAM5, Sys 1, Met 10, ENSEMBLES", 1) ) cubes.append( - self._make_cube( - "2002", "IFM-GEOMAR", "ECHAM5, Sys 1, Met 10, ENSEMBLES", 2 - ) + self._make_cube("2002", "IFM-GEOMAR", "ECHAM5, Sys 1, Met 10, ENSEMBLES", 2) ) cubes.append( self._make_cube( @@ -586,9 +549,7 @@ def test_separable_combination(self): ) ) cube = cubes.merge() - self.assertCML( - cube, ("merge", "separable_combination.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "separable_combination.cml"), checksum=False) class TestDimSelection(tests.IrisTest): @@ -616,9 +577,7 @@ def _make_cube(self, a, b, data=0, a_dim=False, b_dim=False): for name, value, dim in zip(["a", "b"], [a, b], [a_dim, b_dim]): dtype = np.str_ if isinstance(value, str) else np.float32 ctype = DimCoord if dim else AuxCoord - coord = ctype( - np.array([value], dtype=dtype), long_name=name, units="1" - ) + coord = ctype(np.array([value], dtype=dtype), long_name=name, units="1") cube.add_aux_coord(coord) return cube @@ -627,9 +586,7 @@ def test_string_a_with_aux(self): templates = (("a", 0), ("b", 1), ("c", 2), ("d", 3)) cubes = [self._make_cube(a, b) for a, b in templates] cube = iris.cube.CubeList(cubes).merge()[0] - self.assertCML( - cube, ("merge", "string_a_with_aux.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "string_a_with_aux.cml"), checksum=False) self.assertIsInstance(cube.coord("a"), AuxCoord) self.assertIsInstance(cube.coord("b"), DimCoord) self.assertTrue(cube.coord("b") in cube.dim_coords) @@ -638,9 +595,7 @@ def test_string_b_with_aux(self): templates = ((0, "a"), (1, "b"), (2, "c"), (3, "d")) cubes = [self._make_cube(a, b) for a, b in templates] cube = iris.cube.CubeList(cubes).merge()[0] - self.assertCML( - cube, ("merge", "string_b_with_aux.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "string_b_with_aux.cml"), checksum=False) 
self.assertIsInstance(cube.coord("a"), DimCoord) self.assertTrue(cube.coord("a") in cube.dim_coords) self.assertIsInstance(cube.coord("b"), AuxCoord) @@ -649,9 +604,7 @@ def test_string_a_with_dim(self): templates = (("a", 0), ("b", 1), ("c", 2), ("d", 3)) cubes = [self._make_cube(a, b, b_dim=True) for a, b in templates] cube = iris.cube.CubeList(cubes).merge()[0] - self.assertCML( - cube, ("merge", "string_a_with_dim.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "string_a_with_dim.cml"), checksum=False) self.assertIsInstance(cube.coord("a"), AuxCoord) self.assertIsInstance(cube.coord("b"), DimCoord) self.assertTrue(cube.coord("b") in cube.dim_coords) @@ -660,9 +613,7 @@ def test_string_b_with_dim(self): templates = ((0, "a"), (1, "b"), (2, "c"), (3, "d")) cubes = [self._make_cube(a, b, a_dim=True) for a, b in templates] cube = iris.cube.CubeList(cubes).merge()[0] - self.assertCML( - cube, ("merge", "string_b_with_dim.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "string_b_with_dim.cml"), checksum=False) self.assertIsInstance(cube.coord("a"), DimCoord) self.assertTrue(cube.coord("a") in cube.dim_coords) self.assertIsInstance(cube.coord("b"), AuxCoord) @@ -707,9 +658,7 @@ def test_a_dim_b_aux(self): def test_a_dim_b_dim(self): templates = ((0, 10), (1, 11), (2, 12), (3, 13)) - cubes = [ - self._make_cube(a, b, a_dim=True, b_dim=True) for a, b in templates - ] + cubes = [self._make_cube(a, b, a_dim=True, b_dim=True) for a, b in templates] cube = iris.cube.CubeList(cubes).merge()[0] self.assertCML(cube, ("merge", "a_dim_b_dim.cml"), checksum=False) self.assertIsInstance(cube.coord("a"), DimCoord) @@ -754,9 +703,7 @@ def _make_cube(self, a, b, c, data=0): ) ) cube.add_aux_coord( - DimCoord( - np.array([c], dtype=np.int32), standard_name="time", units="1" - ) + DimCoord(np.array([c], dtype=np.int32), standard_name="time", units="1") ) return cube @@ -907,9 +854,7 @@ def test_simple1(self): cube2 = self._make_cube(1, 20, 1) cube3 = 
self._make_cube(2, 20, 0) cube = iris.cube.CubeList([cube1, cube2, cube3]).merge() - self.assertCML( - cube, ("merge", "time_triple_merging1.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "time_triple_merging1.cml"), checksum=False) def test_simple2(self): cubes = iris.cube.CubeList( @@ -923,14 +868,10 @@ def test_simple2(self): ] ) cube = cubes.merge()[0] - self.assertCML( - cube, ("merge", "time_triple_merging2.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "time_triple_merging2.cml"), checksum=False) cube = iris.cube.CubeList(cubes[:-1]).merge()[0] - self.assertCML( - cube, ("merge", "time_triple_merging3.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "time_triple_merging3.cml"), checksum=False) def test_simple3(self): cubes = iris.cube.CubeList( @@ -944,14 +885,10 @@ def test_simple3(self): ] ) cube = cubes.merge()[0] - self.assertCML( - cube, ("merge", "time_triple_merging4.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "time_triple_merging4.cml"), checksum=False) cube = iris.cube.CubeList(cubes[:-1]).merge()[0] - self.assertCML( - cube, ("merge", "time_triple_merging5.cml"), checksum=False - ) + self.assertCML(cube, ("merge", "time_triple_merging5.cml"), checksum=False) class TestCubeMergeTheoretical(tests.IrisTest): @@ -959,12 +896,8 @@ def test_simple_bounds_merge(self): cube1 = iris.tests.stock.simple_2d() cube2 = iris.tests.stock.simple_2d() - cube1.add_aux_coord( - DimCoord(np.int32(10), long_name="pressure", units="Pa") - ) - cube2.add_aux_coord( - DimCoord(np.int32(11), long_name="pressure", units="Pa") - ) + cube1.add_aux_coord(DimCoord(np.int32(10), long_name="pressure", units="Pa")) + cube2.add_aux_coord(DimCoord(np.int32(11), long_name="pressure", units="Pa")) r = iris.cube.CubeList([cube1, cube2]).merge() self.assertCML(r, ("cube_merge", "test_simple_bound_merge.cml")) @@ -973,12 +906,8 @@ def test_simple_multidim_merge(self): cube1 = iris.tests.stock.simple_2d_w_multidim_coords() cube2 = 
iris.tests.stock.simple_2d_w_multidim_coords() - cube1.add_aux_coord( - DimCoord(np.int32(10), long_name="pressure", units="Pa") - ) - cube2.add_aux_coord( - DimCoord(np.int32(11), long_name="pressure", units="Pa") - ) + cube1.add_aux_coord(DimCoord(np.int32(10), long_name="pressure", units="Pa")) + cube2.add_aux_coord(DimCoord(np.int32(11), long_name="pressure", units="Pa")) r = iris.cube.CubeList([cube1, cube2]).merge()[0] self.assertCML(r, ("cube_merge", "multidim_coord_merge.cml")) @@ -993,12 +922,8 @@ def test_simple_points_merge(self): cube1 = iris.tests.stock.simple_2d(with_bounds=False) cube2 = iris.tests.stock.simple_2d(with_bounds=False) - cube1.add_aux_coord( - DimCoord(np.int32(10), long_name="pressure", units="Pa") - ) - cube2.add_aux_coord( - DimCoord(np.int32(11), long_name="pressure", units="Pa") - ) + cube1.add_aux_coord(DimCoord(np.int32(10), long_name="pressure", units="Pa")) + cube2.add_aux_coord(DimCoord(np.int32(11), long_name="pressure", units="Pa")) r = iris.cube.CubeList([cube1, cube2]).merge() self.assertCML(r, ("cube_merge", "test_simple_merge.cml")) @@ -1038,9 +963,7 @@ def _makecube(self, y, cm=False, av=False): cube.add_dim_coord(iris.coords.DimCoord([0, 1], long_name="x"), 0) cube.add_aux_coord(iris.coords.DimCoord(y, long_name="y")) if cm: - cube.add_cell_measure( - iris.coords.CellMeasure([1, 1], long_name="foo"), 0 - ) + cube.add_cell_measure(iris.coords.CellMeasure([1, 1], long_name="foo"), 0) if av: cube.add_ancillary_variable( iris.coords.AncillaryVariable([1, 1], long_name="bar"), 0 @@ -1062,9 +985,7 @@ def test_fail_missing_ancillary_variable(self): def test_fail_different_cell_measure(self): cube1 = self._makecube(0, cm=True) cube2 = self._makecube(1) - cube2.add_cell_measure( - iris.coords.CellMeasure([2, 2], long_name="foo"), 0 - ) + cube2.add_cell_measure(iris.coords.CellMeasure([2, 2], long_name="foo"), 0) cubes = iris.cube.CubeList([cube1, cube2]).merge() self.assertEqual(len(cubes), 2) @@ -1089,9 +1010,7 @@ def 
test_merge_with_ancillary_variable(self): cube2 = self._makecube(1, av=True) cubes = iris.cube.CubeList([cube1, cube2]).merge() self.assertEqual(len(cubes), 1) - self.assertEqual( - cube1.ancillary_variables(), cubes[0].ancillary_variables() - ) + self.assertEqual(cube1.ancillary_variables(), cubes[0].ancillary_variables()) def test_cell_measure_error_msg(self): msg = "cube.cell_measures differ" @@ -1135,9 +1054,7 @@ def _check_merge_error(self, attrs_1, attrs_2, expected_message): aux_coords_and_dims=[(AuxCoord([2], long_name="x"), None)], attributes=attrs_2, ) - with self.assertRaisesRegex( - iris.exceptions.MergeError, expected_message - ): + with self.assertRaisesRegex(iris.exceptions.MergeError, expected_message): iris.cube.CubeList([cube_1, cube_2]).merge_cube() def test_keys_differ__single(self): diff --git a/lib/iris/tests/test_name.py b/lib/iris/tests/test_name.py index 51bc92c28c..b1fb6d1492 100644 --- a/lib/iris/tests/test_name.py +++ b/lib/iris/tests/test_name.py @@ -24,36 +24,24 @@ def test_NAMEII_field(self): self.assertCMLApproxData(cubes, ("name", "NAMEII_field.cml")) def test_NAMEIII_timeseries(self): - cubes = iris.load( - tests.get_data_path(("NAME", "NAMEIII_timeseries.txt")) - ) + cubes = iris.load(tests.get_data_path(("NAME", "NAMEIII_timeseries.txt"))) self.assertCMLApproxData(cubes, ("name", "NAMEIII_timeseries.cml")) def test_NAMEII_timeseries(self): - cubes = iris.load( - tests.get_data_path(("NAME", "NAMEII_timeseries.txt")) - ) + cubes = iris.load(tests.get_data_path(("NAME", "NAMEII_timeseries.txt"))) self.assertCMLApproxData(cubes, ("name", "NAMEII_timeseries.cml")) def test_NAMEIII_version2(self): - cubes = iris.load( - tests.get_data_path(("NAME", "NAMEIII_version2.txt")) - ) + cubes = iris.load(tests.get_data_path(("NAME", "NAMEIII_version2.txt"))) self.assertCMLApproxData(cubes, ("name", "NAMEIII_version2.cml")) def test_NAMEIII_trajectory(self): - cubes = iris.load( - tests.get_data_path(("NAME", "NAMEIII_trajectory.txt")) - ) + 
cubes = iris.load(tests.get_data_path(("NAME", "NAMEIII_trajectory.txt"))) self.assertCML(cubes[0], ("name", "NAMEIII_trajectory0.cml")) - self.assertCML( - cubes, ("name", "NAMEIII_trajectory.cml"), checksum=False - ) + self.assertCML(cubes, ("name", "NAMEIII_trajectory.cml"), checksum=False) def test_NAMEII__no_time_averaging(self): - cubes = iris.load( - tests.get_data_path(("NAME", "NAMEII_no_time_averaging.txt")) - ) + cubes = iris.load(tests.get_data_path(("NAME", "NAMEII_no_time_averaging.txt"))) # Also check that it saves without error. # This was previously failing, see https://github.com/SciTools/iris/issues/3288 diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 793f8df876..b46435579d 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -47,9 +47,7 @@ def tearDown(self): def test_monotonic(self): cubes = iris.load( - tests.get_data_path( - ("NetCDF", "testing", "test_monotonic_coordinate.nc") - ) + tests.get_data_path(("NetCDF", "testing", "test_monotonic_coordinate.nc")) ) cubes = sorted(cubes, key=lambda cube: cube.var_name) self.assertCML(cubes, ("netcdf", "netcdf_monotonic.cml")) @@ -89,9 +87,7 @@ def test_missing_time_bounds(self): def test_load_global_xyzt_gems(self): # Test loading single xyzt CF-netCDF file (multi-cube). 
cubes = iris.load( - tests.get_data_path( - ("NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc") - ) + tests.get_data_path(("NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc")) ) cubes = sorted(cubes, key=lambda cube: cube.name()) self.assertCML(cubes, ("netcdf", "netcdf_global_xyzt_gems.cml")) @@ -114,9 +110,7 @@ def test_load_global_xyzt_gems_iter(self): key=lambda cube: cube.name(), ) ): - self.assertCML( - cube, ("netcdf", "netcdf_global_xyzt_gems_iter_%d.cml" % i) - ) + self.assertCML(cube, ("netcdf", "netcdf_global_xyzt_gems_iter_%d.cml" % i)) # ------------------------------------------------------------------------- # It is not considered necessary to have integration tests for @@ -141,17 +135,13 @@ def test_load_rotated_xyt_precipitation(self): ("NetCDF", "rotated", "xyt", "small_rotPole_precipitation.nc") ) ) - self.assertCML( - cube, ("netcdf", "netcdf_rotated_xyt_precipitation.cml") - ) + self.assertCML(cube, ("netcdf", "netcdf_rotated_xyt_precipitation.cml")) def test_load_tmerc_grid_and_clim_bounds(self): # Test loading a single CF-netCDF file with a transverse Mercator # grid_mapping and a time variable with climatology. 
cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "transverse_mercator", "tmean_1910_1910.nc") - ) + tests.get_data_path(("NetCDF", "transverse_mercator", "tmean_1910_1910.nc")) ) self.assertCML(cube, ("netcdf", "netcdf_tmerc_and_climatology.cml")) @@ -180,12 +170,8 @@ def test_load_tmerc_grid_with_projection_origin(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.91 ), ) - self.assertEqual( - cube.coord("projection_x_coordinate").coord_system, expected - ) - self.assertEqual( - cube.coord("projection_y_coordinate").coord_system, expected - ) + self.assertEqual(cube.coord("projection_x_coordinate").coord_system, expected) + self.assertEqual(cube.coord("projection_y_coordinate").coord_system, expected) def test_load_lcc_grid(self): # Test loading a single CF-netCDF file with Lambert conformal conic @@ -212,9 +198,7 @@ def test_missing_climatology(self): def test_load_merc_grid(self): # Test loading a single CF-netCDF file with a Mercator grid_mapping cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "mercator", "toa_brightness_temperature.nc") - ) + tests.get_data_path(("NetCDF", "mercator", "toa_brightness_temperature.nc")) ) self.assertCML(cube, ("netcdf", "netcdf_merc.cml")) @@ -222,9 +206,7 @@ def test_load_complex_merc_grid(self): # Test loading a single CF-netCDF file with a Mercator grid_mapping that # includes false easting and northing and a standard parallel cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "mercator", "false_east_north_merc.nc") - ) + tests.get_data_path(("NetCDF", "mercator", "false_east_north_merc.nc")) ) self.assertCML(cube, ("netcdf", "netcdf_merc_false.cml")) @@ -232,9 +214,7 @@ def test_load_merc_grid_non_unit_scale_factor(self): # Test loading a single CF-netCDF file with a Mercator grid_mapping that # includes a non-unit scale factor at projection origin cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "mercator", "non_unit_scale_factor_merc.nc") - ) + tests.get_data_path(("NetCDF", 
"mercator", "non_unit_scale_factor_merc.nc")) ) self.assertCML(cube, ("netcdf", "netcdf_merc_scale_factor.cml")) @@ -252,17 +232,13 @@ def test_load_polar_stereographic_grid(self): # Test loading a single CF-netCDF file with a polar stereographic # grid_mapping. cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "polar", "toa_brightness_temperature.nc") - ) + tests.get_data_path(("NetCDF", "polar", "toa_brightness_temperature.nc")) ) self.assertCML(cube, ("netcdf", "netcdf_polar.cml")) def test_cell_methods(self): # Test exercising CF-netCDF cell method parsing. - cubes = iris.load( - tests.get_data_path(("NetCDF", "testing", "cell_methods.nc")) - ) + cubes = iris.load(tests.get_data_path(("NetCDF", "testing", "cell_methods.nc"))) # TEST_COMPAT mod - new cube merge doesn't sort in the same way - test # can pass by manual sorting... @@ -291,18 +267,12 @@ def test_deferred_loading(self): # Consecutive index on same dimension. self.assertCML(cube[0], ("netcdf", "netcdf_deferred_index_0.cml")) self.assertCML(cube[0][0], ("netcdf", "netcdf_deferred_index_1.cml")) - self.assertCML( - cube[0][0][0], ("netcdf", "netcdf_deferred_index_2.cml") - ) + self.assertCML(cube[0][0][0], ("netcdf", "netcdf_deferred_index_2.cml")) # Consecutive slice on same dimension. self.assertCML(cube[0:20], ("netcdf", "netcdf_deferred_slice_0.cml")) - self.assertCML( - cube[0:20][0:10], ("netcdf", "netcdf_deferred_slice_1.cml") - ) - self.assertCML( - cube[0:20][0:10][0:5], ("netcdf", "netcdf_deferred_slice_2.cml") - ) + self.assertCML(cube[0:20][0:10], ("netcdf", "netcdf_deferred_slice_1.cml")) + self.assertCML(cube[0:20][0:10][0:5], ("netcdf", "netcdf_deferred_slice_2.cml")) # Consecutive tuple index on same dimension. 
self.assertCML( @@ -325,9 +295,7 @@ def test_deferred_loading(self): cube[((2, 7, 3, 4, 5, 0, 9, 10),)][2:6][3], ("netcdf", "netcdf_deferred_mix_0.cml"), ) - self.assertCML( - cube[0][(0, 2), (1, 3)], ("netcdf", "netcdf_deferred_mix_1.cml") - ) + self.assertCML(cube[0][(0, 2), (1, 3)], ("netcdf", "netcdf_deferred_mix_1.cml")) def test_um_stash_source(self): """Test that um_stash_source is converted into a STASH code""" @@ -453,9 +421,7 @@ def test_bad_um_stash_source(self): cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) self.assertFalse(hasattr(cubes[0].attributes, "STASH")) - self.assertEqual( - cubes[0].attributes["um_stash_source"], "10*m01s02i003" - ) + self.assertEqual(cubes[0].attributes["um_stash_source"], "10*m01s02i003") def test_units(self): # Test exercising graceful cube and coordinate units loading. @@ -533,9 +499,7 @@ def test_hybrid(self): iris.save(cube, file_out, netcdf_format="NETCDF3_CLASSIC") # Check the netCDF file against CDL expected output. - self.assertCDL( - file_out, ("netcdf", "netcdf_save_realistic_4d.cdl") - ) + self.assertCDL(file_out, ("netcdf", "netcdf_save_realistic_4d.cdl")) def test_no_hybrid(self): cube = stock.realistic_4d() @@ -555,9 +519,7 @@ def test_scalar_cube(self): with self.temp_filename(suffix=".nc") as filename: iris.save(cube, filename, netcdf_format="NETCDF3_CLASSIC") - self.assertCDL( - filename, ("netcdf", "netcdf_save_realistic_0d.cdl") - ) + self.assertCDL(filename, ("netcdf", "netcdf_save_realistic_0d.cdl")) def test_no_name_cube(self): # Cube with no names. 
@@ -575,9 +537,7 @@ def test_no_name_cube(self): class TestNetCDFSave(tests.IrisTest): def setUp(self): - self.cubell = iris.cube.Cube( - np.arange(4).reshape(2, 2), "air_temperature" - ) + self.cubell = iris.cube.Cube(np.arange(4).reshape(2, 2), "air_temperature") self.cube = iris.cube.Cube( np.zeros([2, 2]), standard_name="surface_temperature", @@ -709,9 +669,7 @@ def test_netcdf_save_multi2multi(self): iris.save(cube, file_out) # Check the netCDF file against CDL expected output. - self.assertCDL( - file_out, ("netcdf", "netcdf_save_multi_%d.cdl" % index) - ) + self.assertCDL(file_out, ("netcdf", "netcdf_save_multi_%d.cdl" % index)) @tests.skip_data def test_netcdf_save_multi2single(self): @@ -797,18 +755,14 @@ def test_netcdf_multi_wtih_samedimcoord(self): iris.save(cubes, file_out) # Check the netCDF file against CDL expected output. - self.assertCDL( - file_out, ("netcdf", "netcdf_save_samedimcoord.cdl") - ) + self.assertCDL(file_out, ("netcdf", "netcdf_save_samedimcoord.cdl")) def test_netcdf_multi_conflict_name_dup_coord(self): # Duplicate coordinates with modified variable names lookup. latitude1 = iris.coords.DimCoord( np.arange(10), standard_name="latitude", units="1" ) - time2 = iris.coords.DimCoord( - np.arange(2), standard_name="time", units="1" - ) + time2 = iris.coords.DimCoord(np.arange(2), standard_name="time", units="1") latitude2 = iris.coords.DimCoord( np.arange(2), standard_name="latitude", units="1" ) @@ -832,9 +786,7 @@ def test_netcdf_hybrid_height(self): # (i.e. dimensionless vertical) coordinate. # Read PP input file. names = ["air_potential_temperature", "surface_altitude"] - file_in = tests.get_data_path( - ("PP", "COLPEX", "small_colpex_theta_p_alt.pp") - ) + file_in = tests.get_data_path(("PP", "COLPEX", "small_colpex_theta_p_alt.pp")) cube = iris.load_cube(file_in, names[0]) # Write Cube to netCDF file. 
@@ -842,9 +794,7 @@ def test_netcdf_hybrid_height(self): iris.save(cube, file_out) # Check the netCDF file against CDL expected output. - self.assertCDL( - file_out, ("netcdf", "netcdf_save_hybrid_height.cdl") - ) + self.assertCDL(file_out, ("netcdf", "netcdf_save_hybrid_height.cdl")) # Read netCDF file. cubes = iris.load(file_out) @@ -871,17 +821,13 @@ def test_netcdf_save_ndim_auxiliary(self): iris.save(cube, file_out) # Check the netCDF file against CDL expected output. - self.assertCDL( - file_out, ("netcdf", "netcdf_save_ndim_auxiliary.cdl") - ) + self.assertCDL(file_out, ("netcdf", "netcdf_save_ndim_auxiliary.cdl")) # Read the netCDF file. cube = iris.load_cube(file_out) # Check the netCDF read, write, read mechanism. - self.assertCML( - cube, ("netcdf", "netcdf_save_load_ndim_auxiliary.cml") - ) + self.assertCML(cube, ("netcdf", "netcdf_save_load_ndim_auxiliary.cml")) def test_netcdf_save_conflicting_aux(self): # Test saving CF-netCDF with multi-dimensional auxiliary coordinates, @@ -979,9 +925,7 @@ def test_netcdf_save_gridmapping(self): iris.save(cubes, file_out) # Check the netCDF file against CDL expected output. 
- self.assertCDL( - file_out, ("netcdf", "netcdf_save_gridmapmulti.cdl") - ) + self.assertCDL(file_out, ("netcdf", "netcdf_save_gridmapmulti.cdl")) def test_netcdf_save_conflicting_names(self): # Test saving CF-netCDF with a dimension name corresponding to @@ -989,9 +933,7 @@ def test_netcdf_save_conflicting_names(self): self.cube4.add_dim_coord( iris.coords.DimCoord(np.arange(10), "time", units="1"), 0 ) - self.cube6.add_aux_coord( - iris.coords.AuxCoord(1, "time", units="1"), None - ) + self.cube6.add_aux_coord(iris.coords.AuxCoord(1, "time", units="1"), None) cubes = iris.cube.CubeList([self.cube4, self.cube6]) with self.temp_filename(suffix=".nc") as file_out: @@ -1015,9 +957,7 @@ def test_trajectory(self): with self.temp_filename(suffix=".nc") as temp_filename: iris.save(traj, temp_filename) reloaded = iris.load_cube(temp_filename) - self.assertCML( - reloaded, ("netcdf", "save_load_traj.cml"), checksum=False - ) + self.assertCML(reloaded, ("netcdf", "save_load_traj.cml"), checksum=False) self.assertArrayEqual(traj.data, reloaded.data) def test_attributes(self): @@ -1053,9 +993,7 @@ def test_attributes(self): for gkey in aglobals: if getattr(ds, gkey) != aglobals.get(gkey): exceptions.append( - "{} != {}".format( - getattr(ds, gkey), aglobals.get(gkey) - ) + "{} != {}".format(getattr(ds, gkey), aglobals.get(gkey)) ) # Should be overridden. 
for okey in aover: @@ -1072,9 +1010,7 @@ def test_attributes(self): "{} != {}".format(getattr(dv, vkey), avars.get(vkey)) ) if getattr(dv, "um_stash_source") != avars.get("STASH"): - exc = "{} != {}".format( - getattr(dv, "um_stash_source"), avars.get(vkey) - ) + exc = "{} != {}".format(getattr(dv, "um_stash_source"), avars.get(vkey)) exceptions.append(exc) self.assertEqual(exceptions, []) @@ -1099,9 +1035,7 @@ def test_conflicting_global_attributes(self): with self.temp_filename(suffix=".nc") as filename: with mock.patch("warnings.warn") as warn: iris.save([self.cube, self.cube2], filename) - warn.assert_called_with( - expected_msg, category=IrisCfSaveWarning - ) + warn.assert_called_with(expected_msg, category=IrisCfSaveWarning) self.assertCDL( filename, ("netcdf", "netcdf_save_confl_global_attr.cdl") ) @@ -1139,9 +1073,7 @@ def test_no_global_attributes(self): ] with self.temp_filename(suffix=".nc") as filename: iris.save(cubes, filename) - self.assertCDL( - filename, ("netcdf", "netcdf_save_no_global_attr.cdl") - ) + self.assertCDL(filename, ("netcdf", "netcdf_save_no_global_attr.cdl")) class TestNetCDFSave__ancillaries(tests.IrisTest): @@ -1272,9 +1204,7 @@ def setUp(self): ) def test_int64_dimension_coord_netcdf3(self): - coord = iris.coords.DimCoord( - np.array([1, 2], dtype=np.int64), long_name="x" - ) + coord = iris.coords.DimCoord(np.array([1, 2], dtype=np.int64), long_name="x") self.cube.add_dim_coord(coord, 0) with self.temp_filename(suffix=".nc") as filename: iris.save(self.cube, filename, netcdf_format="NETCDF3_CLASSIC") @@ -1286,9 +1216,7 @@ def test_int64_dimension_coord_netcdf3(self): ) def test_int64_auxiliary_coord_netcdf3(self): - coord = iris.coords.AuxCoord( - np.array([1, 2], dtype=np.int64), long_name="x" - ) + coord = iris.coords.AuxCoord(np.array([1, 2], dtype=np.int64), long_name="x") self.cube.add_aux_coord(coord, 0) with self.temp_filename(suffix=".nc") as filename: iris.save(self.cube, filename, netcdf_format="NETCDF3_CLASSIC") @@ 
-1307,9 +1235,7 @@ def test_int64_data_netcdf3(self): self.assertCML(reloaded, ("netcdf", "int64_data_netcdf3.cml")) def test_uint32_dimension_coord_netcdf3(self): - coord = iris.coords.DimCoord( - np.array([1, 2], dtype=np.uint32), long_name="x" - ) + coord = iris.coords.DimCoord(np.array([1, 2], dtype=np.uint32), long_name="x") self.cube.add_dim_coord(coord, 0) with self.temp_filename(suffix=".nc") as filename: iris.save(self.cube, filename, netcdf_format="NETCDF3_CLASSIC") @@ -1321,9 +1247,7 @@ def test_uint32_dimension_coord_netcdf3(self): ) def test_uint32_auxiliary_coord_netcdf3(self): - coord = iris.coords.AuxCoord( - np.array([1, 2], dtype=np.uint32), long_name="x" - ) + coord = iris.coords.AuxCoord(np.array([1, 2], dtype=np.uint32), long_name="x") self.cube.add_aux_coord(coord, 0) with self.temp_filename(suffix=".nc") as filename: iris.save(self.cube, filename, netcdf_format="NETCDF3_CLASSIC") @@ -1431,7 +1355,7 @@ def test_process_flags(self): process_flags = cube.attributes["ukmo__process_flags"] self.assertTrue( len(process_flags) == len(bits), - "Mismatch in " "number of process flags.", + "Mismatch in number of process flags.", ) self.assertEqual(set(process_flags), set(descriptions)) diff --git a/lib/iris/tests/test_peak.py b/lib/iris/tests/test_peak.py index 1d9dd68cc1..226a8dbf7f 100644 --- a/lib/iris/tests/test_peak.py +++ b/lib/iris/tests/test_peak.py @@ -96,9 +96,7 @@ def test_peak_duplicate_coords(self): collapsed_cube.data, np.array([3], dtype=np.float32) ) - collapsed_cube = cube.collapsed( - ("latitude", "latitude"), iris.analysis.PEAK - ) + collapsed_cube = cube.collapsed(("latitude", "latitude"), iris.analysis.PEAK) self.assertArrayAlmostEqual( collapsed_cube.data, np.array([3], dtype=np.float32) ) @@ -128,21 +126,15 @@ def test_peak_2d(self): collapsed_cube = cube.collapsed("latitude", iris.analysis.PEAK) self.assertArrayAlmostEqual( collapsed_cube.data, - np.array( - [4.024977, 5.024977, 7.017852, 4.024977], dtype=np.float32 - ), + 
np.array([4.024977, 5.024977, 7.017852, 4.024977], dtype=np.float32), ) - collapsed_cube = cube.collapsed( - ("longitude", "latitude"), iris.analysis.PEAK - ) + collapsed_cube = cube.collapsed(("longitude", "latitude"), iris.analysis.PEAK) self.assertArrayAlmostEqual( collapsed_cube.data, np.array([7.041787], dtype=np.float32) ) - collapsed_cube = cube.collapsed( - ("latitude", "longitude"), iris.analysis.PEAK - ) + collapsed_cube = cube.collapsed(("latitude", "longitude"), iris.analysis.PEAK) self.assertArrayAlmostEqual( collapsed_cube.data, np.array([7.041629], dtype=np.float32) ) diff --git a/lib/iris/tests/test_pickling.py b/lib/iris/tests/test_pickling.py index 342b07cb03..7e1a1bdb12 100644 --- a/lib/iris/tests/test_pickling.py +++ b/lib/iris/tests/test_pickling.py @@ -48,13 +48,9 @@ def assertCubeData(self, cube1, cube2): @tests.skip_data def test_cube_pickle(self): - cube = iris.load_cube( - tests.get_data_path(("PP", "globClim1", "theta.pp")) - ) + cube = iris.load_cube(tests.get_data_path(("PP", "globClim1", "theta.pp"))) self.assertTrue(cube.has_lazy_data()) - self.assertCML( - cube, ("cube_io", "pickling", "theta.cml"), checksum=False - ) + self.assertCML(cube, ("cube_io", "pickling", "theta.cml"), checksum=False) for p, recon_cube in self.pickle_then_unpickle(cube): self.assertTrue(recon_cube.has_lazy_data()) @@ -103,9 +99,7 @@ def test_cubelist_pickle(self): ("cube_io", "pickling", "single_cube.cml"), ) - for cube_orig, cube_reconstruct in zip( - cubelist, reconstructed_cubelist - ): + for cube_orig, cube_reconstruct in zip(cubelist, reconstructed_cubelist): self.assertArrayEqual(cube_orig.data, cube_reconstruct.data) self.assertEqual(cube_orig, cube_reconstruct) diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 150d521e34..ea9736c016 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -170,9 +170,7 @@ def test_cube_coord(self): def test_coord_coord(self): # plot two coordinates that are not mappable 
- self.draw_method( - self.cube1d.coord("sigma"), self.cube1d.coord("altitude") - ) + self.draw_method(self.cube1d.coord("sigma"), self.cube1d.coord("altitude")) self.check_graphic() def test_coord_coord_map(self): @@ -347,9 +345,7 @@ class Test1dFillBetween(tests.GraphicsTest): def setUp(self): super().setUp() self.cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "testing", "small_theta_colpex.nc") - ), + tests.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")), "air_potential_temperature", )[0, 0] self.draw_method = iplt.fill_between @@ -437,9 +433,7 @@ class Test1dQuickplotFillBetween(Test1dFillBetween): def setUp(self): tests.GraphicsTest.setUp(self) self.cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "testing", "small_theta_colpex.nc") - ), + tests.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")), "air_potential_temperature", )[0, 0] self.draw_method = qplt.fill_between @@ -461,9 +455,7 @@ def test_1d_positive_down(self): self.check_graphic() def test_2d_positive_up(self): - path = tests.get_data_path( - ("NetCDF", "testing", "small_theta_colpex.nc") - ) + path = tests.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")) cube = iris.load_cube(path, "air_potential_temperature")[0, :, 42, :] qplt.pcolormesh(cube) self.check_graphic() @@ -675,9 +667,7 @@ class test is then overridden to ignore warnings in order to def __new__(cls, name, bases, local): def add_decorated_methods(attr_dict, target_dict, decorator): for key, value in attr_dict.items(): - if isinstance(value, types.FunctionType) and key.startswith( - "test" - ): + if isinstance(value, types.FunctionType) and key.startswith("test"): new_key = "_".join((key, decorator.__name__)) if new_key not in target_dict: wrapped = decorator(value) @@ -685,15 +675,12 @@ def add_decorated_methods(attr_dict, target_dict, decorator): target_dict[new_key] = wrapped else: raise RuntimeError( - "A attribute called {!r} " - "already exists.".format(new_key) + "A 
attribute called {!r} already exists.".format(new_key) ) def override_with_decorated_methods(attr_dict, target_dict, decorator): for key, value in attr_dict.items(): - if isinstance(value, types.FunctionType) and key.startswith( - "test" - ): + if isinstance(value, types.FunctionType) and key.startswith("test"): target_dict[key] = decorator(value) # Add decorated versions of base methods @@ -703,9 +690,7 @@ def override_with_decorated_methods(attr_dict, target_dict, decorator): # Override base methods to ignore warnings. for base in bases: - override_with_decorated_methods( - base.__dict__, local, ignore_warnings - ) + override_with_decorated_methods(base.__dict__, local, ignore_warnings) return type.__new__(cls, name, bases, local) @@ -837,23 +822,17 @@ def __repr__(self): class TestPlotCoordinatesGiven(tests.GraphicsTest): def setUp(self): super().setUp() - filename = tests.get_data_path( - ("PP", "COLPEX", "theta_and_orog_subset.pp") - ) + filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) self.cube = load_cube_once(filename, "air_potential_temperature") self.draw_module = iris.plot self.contourf = LambdaStr( "iris.plot.contourf", - lambda cube, *args, **kwargs: iris.plot.contourf( - cube, *args, **kwargs - ), + lambda cube, *args, **kwargs: iris.plot.contourf(cube, *args, **kwargs), ) self.contour = LambdaStr( "iris.plot.contour", - lambda cube, *args, **kwargs: iris.plot.contour( - cube, *args, **kwargs - ), + lambda cube, *args, **kwargs: iris.plot.contour(cube, *args, **kwargs), ) self.points = LambdaStr( "iris.plot.points", @@ -863,9 +842,7 @@ def setUp(self): ) self.plot = LambdaStr( "iris.plot.plot", - lambda cube, *args, **kwargs: iris.plot.plot( - cube, *args, **kwargs - ), + lambda cube, *args, **kwargs: iris.plot.plot(cube, *args, **kwargs), ) self.results = { diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py index 44650919e1..59bfe28965 100644 --- a/lib/iris/tests/test_pp_cf.py +++ 
b/lib/iris/tests/test_pp_cf.py @@ -17,9 +17,7 @@ import iris.util -def callback_000003000000_16_202_000128_1860_09_01_00_00_b_pp( - cube, field, filename -): +def callback_000003000000_16_202_000128_1860_09_01_00_00_b_pp(cube, field, filename): cube.attributes["STASH"] = STASH(1, 16, 202) cube.standard_name = "geopotential_height" cube.units = "m" @@ -55,9 +53,7 @@ def callback_integer_b_pp(cube, field, filename): del cube.attributes["STASH"] -def callback_001000000000_00_000_000000_1860_01_01_00_00_f_b_pp( - cube, field, filename -): +def callback_001000000000_00_000_000000_1860_01_01_00_00_f_b_pp(cube, field, filename): cube.standard_name = "sea_surface_height_above_geoid" cube.units = "m" @@ -95,9 +91,7 @@ def _test_file(self, name): # https://github.com/Unidata/netcdf4-python/issues/725 fill_value = _thread_safe_nc.default_fillvals[cube.dtype.str[1:]] - file_nc = tempfile.NamedTemporaryFile( - suffix=".nc", delete=False - ).name + file_nc = tempfile.NamedTemporaryFile(suffix=".nc", delete=False).name iris.save( cube, file_nc, @@ -108,8 +102,7 @@ def _test_file(self, name): # Check the netCDF file against CDL expected output. 
self.assertCDL( file_nc, - self._ref_dir - + ("to_netcdf", "%s_%d.cdl" % (fname_name, index)), + self._ref_dir + ("to_netcdf", "%s_%d.cdl" % (fname_name, index)), ) nc_filenames.append(file_nc) @@ -119,8 +112,7 @@ def _test_file(self, name): cube = iris.load_cube(nc_filename) self.assertCML( cube, - self._ref_dir - + ("from_netcdf", "%s_%d.cml" % (fname_name, index)), + self._ref_dir + ("from_netcdf", "%s_%d.cml" % (fname_name, index)), ) os.remove(nc_filename) diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index b8606e3120..6e683c0e42 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -140,9 +140,7 @@ def test_lbproc_bad_access(self): @tests.skip_data class TestPPField_GlobalTemperature(IrisPPTest): def setUp(self): - self.original_pp_filepath = tests.get_data_path( - ("PP", "aPPglob1", "global.pp") - ) + self.original_pp_filepath = tests.get_data_path(("PP", "aPPglob1", "global.pp")) self.r = list(pp.load(self.original_pp_filepath)) def test_full_file(self): @@ -260,18 +258,14 @@ def test_lots_of_extra_data(self): @tests.skip_data class TestPPFileExtraXData(IrisPPTest): def setUp(self): - self.original_pp_filepath = tests.get_data_path( - ("PP", "ukV1", "ukVpmslont.pp") - ) + self.original_pp_filepath = tests.get_data_path(("PP", "ukV1", "ukVpmslont.pp")) self.r = list(pp.load(self.original_pp_filepath))[0:5] def test_full_file(self): self.check_pp(self.r, ("PP", "extra_x_data.pp.txt")) def test_save_single(self): - filepath = tests.get_data_path( - ("PP", "ukV1", "ukVpmslont_first_field.pp") - ) + filepath = tests.get_data_path(("PP", "ukV1", "ukVpmslont_first_field.pp")) f = next(pp.load(filepath)) temp_filename = iris.util.create_temp_filename(".pp") @@ -322,9 +316,7 @@ def test_full_file(self): ) def test_save_single(self): - filepath = tests.get_data_path( - ("PP", "model_comp", "dec_first_field.pp") - ) + filepath = tests.get_data_path(("PP", "model_comp", "dec_first_field.pp")) f = 
next(pp.load(filepath)) temp_filename = iris.util.create_temp_filename(".pp") diff --git a/lib/iris/tests/test_pp_stash.py b/lib/iris/tests/test_pp_stash.py index 733d1697de..e5b6953bf3 100644 --- a/lib/iris/tests/test_pp_stash.py +++ b/lib/iris/tests/test_pp_stash.py @@ -49,40 +49,28 @@ def test_stash_against_str(self): self.assertNotEqual("m02s03i004", iris.fileformats.pp.STASH(1, 2, 3)) def test_irregular_stash_str(self): - self.assertEqual( - iris.fileformats.pp.STASH(1, 2, 3), "m01s02i0000000003" - ) + self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s02i0000000003") self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s02i3") self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s2i3") self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m1s2i3") - self.assertEqual( - "m01s02i0000000003", iris.fileformats.pp.STASH(1, 2, 3) - ) + self.assertEqual("m01s02i0000000003", iris.fileformats.pp.STASH(1, 2, 3)) self.assertEqual("m01s02i3", iris.fileformats.pp.STASH(1, 2, 3)) self.assertEqual("m01s2i3", iris.fileformats.pp.STASH(1, 2, 3)) self.assertEqual("m1s2i3", iris.fileformats.pp.STASH(1, 2, 3)) - self.assertNotEqual( - iris.fileformats.pp.STASH(2, 3, 4), "m01s02i0000000003" - ) + self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m01s02i0000000003") self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m01s02i3") self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m01s2i3") self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m1s2i3") - self.assertNotEqual( - "m01s02i0000000003", iris.fileformats.pp.STASH(2, 3, 4) - ) + self.assertNotEqual("m01s02i0000000003", iris.fileformats.pp.STASH(2, 3, 4)) self.assertNotEqual("m01s02i3", iris.fileformats.pp.STASH(2, 3, 4)) self.assertNotEqual("m01s2i3", iris.fileformats.pp.STASH(2, 3, 4)) self.assertNotEqual("m1s2i3", iris.fileformats.pp.STASH(2, 3, 4)) - self.assertEqual( - iris.fileformats.pp.STASH.from_msi("M01s02i003"), "m01s02i003" - ) - self.assertEqual( - "m01s02i003", 
iris.fileformats.pp.STASH.from_msi("M01s02i003") - ) + self.assertEqual(iris.fileformats.pp.STASH.from_msi("M01s02i003"), "m01s02i003") + self.assertEqual("m01s02i003", iris.fileformats.pp.STASH.from_msi("M01s02i003")) def test_illegal_stash_str_range(self): self.assertEqual(iris.fileformats.pp.STASH(0, 2, 3), "m??s02i003") diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py index d9c47c7841..da49ff8188 100644 --- a/lib/iris/tests/test_pp_to_cube.py +++ b/lib/iris/tests/test_pp_to_cube.py @@ -81,9 +81,7 @@ def test_regrid_missing_coord(self): # If the target cube is missing one of the source dimension # coords, ensure the re-grid fails nicely - i.e. returns None. self.target.remove_coord("bar") - new_ref = iris.fileformats.rules._ensure_aligned( - {}, self.ref, self.target - ) + new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) self.assertIsNone(new_ref) def test_regrid_codimension(self): @@ -94,15 +92,11 @@ def test_regrid_codimension(self): new_foo = self.target.coord("bar").copy() new_foo.rename("foo") self.target.add_aux_coord(new_foo, 0) - new_ref = iris.fileformats.rules._ensure_aligned( - {}, self.ref, self.target - ) + new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) self.assertIsNone(new_ref) def test_regrid_identity(self): - new_ref = iris.fileformats.rules._ensure_aligned( - {}, self.ref, self.target - ) + new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) # Bounds don't make it through the re-grid process self.ref.coord("bar").bounds = None self.ref.coord("foo").bounds = None @@ -135,9 +129,7 @@ def test_lbproc(self): # Set up standard name and T+24 constraint constraint = iris.Constraint("air_temperature", forecast_period=24) cubes = iris.load(data_path, constraint) - cubes = iris.cube.CubeList( - [cubes[0], cubes[3], cubes[1], cubes[2], cubes[4]] - ) + cubes = iris.cube.CubeList([cubes[0], cubes[3], cubes[1], cubes[2], cubes[4]]) 
self.assertCML(cubes, ("pp_load_rules", "lbproc_mean_max_min.cml")) def test_cell_methods(self): @@ -175,9 +167,7 @@ def test_cell_methods(self): if value in cell_method_values: # Check for cell method on cube - self.assertEqual( - cube.cell_methods[0].method, cell_method_values[value] - ) + self.assertEqual(cube.cell_methods[0].method, cell_method_values[value]) else: # Check no cell method was created for values other than 128, 4096, 8192 self.assertEqual(len(cube.cell_methods), 0) @@ -206,9 +196,7 @@ def test_process_flags(self): if value in omit_process_flags_values: # Check ukmo__process_flags attribute not created - self.assertEqual( - cube.attributes.get("ukmo__process_flags", None), None - ) + self.assertEqual(cube.attributes.get("ukmo__process_flags", None), None) else: # Check ukmo__process_flags attribute contains correct values self.assertIn( @@ -243,7 +231,7 @@ def test_process_flags(self): self.assertEqual( set(cube.attributes["ukmo__process_flags"]), set(multiple_map[sum(bit_values)]), - "Mismatch between expected and actual process " "flags.", + "Mismatch between expected and actual process flags.", ) os.remove(temp_filename) diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index c42a8989fb..eb0011d4f1 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -50,12 +50,8 @@ def _load_theta(): class TestQuickplotCoordinatesGiven(test_plot.TestPlotCoordinatesGiven): def setUp(self): tests.GraphicsTest.setUp(self) - filename = tests.get_data_path( - ("PP", "COLPEX", "theta_and_orog_subset.pp") - ) - self.cube = test_plot.load_cube_once( - filename, "air_potential_temperature" - ) + filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) + self.cube = test_plot.load_cube_once(filename, "air_potential_temperature") self.draw_module = iris.quickplot self.contourf = test_plot.LambdaStr( @@ -66,9 +62,7 @@ def setUp(self): ) self.contour = test_plot.LambdaStr( 
"iris.quickplot.contour", - lambda cube, *args, **kwargs: iris.quickplot.contour( - cube, *args, **kwargs - ), + lambda cube, *args, **kwargs: iris.quickplot.contour(cube, *args, **kwargs), ) self.points = test_plot.LambdaStr( "iris.quickplot.points", @@ -78,9 +72,7 @@ def setUp(self): ) self.plot = test_plot.LambdaStr( "iris.quickplot.plot", - lambda cube, *args, **kwargs: iris.quickplot.plot( - cube, *args, **kwargs - ), + lambda cube, *args, **kwargs: iris.quickplot.plot(cube, *args, **kwargs), ) self.results = { @@ -135,9 +127,7 @@ def test_contour(self): qplt.contour(self._small()) self.check_graphic() - qplt.contourf( - self._small(), coords=["model_level_number", "grid_longitude"] - ) + qplt.contourf(self._small(), coords=["model_level_number", "grid_longitude"]) self.check_graphic() def test_contourf(self): @@ -148,14 +138,10 @@ def test_contourf(self): self.check_graphic() - qplt.contourf( - self._small(), coords=["model_level_number", "grid_longitude"] - ) + qplt.contourf(self._small(), coords=["model_level_number", "grid_longitude"]) self.check_graphic() - qplt.contourf( - self._small(), coords=["grid_longitude", "model_level_number"] - ) + qplt.contourf(self._small(), coords=["grid_longitude", "model_level_number"]) self.check_graphic() def test_contourf_axes_specified(self): @@ -293,9 +279,7 @@ def test_horizontal(self): def test_vertical(self): cube = test_plot.simple_cube()[0] - qplt.hist( - cube, bins=np.linspace(287.7, 288.2, 11), orientation="horizontal" - ) + qplt.hist(cube, bins=np.linspace(287.7, 288.2, 11), orientation="horizontal") self.check_graphic() diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index cf1dc44755..60dd6da153 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -26,9 +26,7 @@ class TestMonotonic(tests.IrisTest): def assertMonotonic(self, array, direction=None, **kwargs): if direction is not None: - mono, dir = iris.util.monotonic( - array, return_direction=True, **kwargs - ) + 
mono, dir = iris.util.monotonic(array, return_direction=True, **kwargs) if not mono: self.fail("Array was not monotonic:/n %r" % array) if dir != np.sign(direction): @@ -92,15 +90,11 @@ def test_oversize_string(self): # Test with a clip length that means the string will be clipped clip_length = 109 - result = iris.util.clip_string( - self.test_string, clip_length, self.rider - ) + result = iris.util.clip_string(self.test_string, clip_length, self.rider) # Check the length is between what we requested ( + rider length) and the length of the original string self.assertTrue( - clip_length + len(self.rider) - <= len(result) - < len(self.test_string), + clip_length + len(self.rider) <= len(result) < len(self.test_string), "String was not clipped.", ) @@ -114,9 +108,7 @@ def test_undersize_string(self): # Test with a clip length that is longer than the string clip_length = 10999 - result = iris.util.clip_string( - self.test_string, clip_length, self.rider - ) + result = iris.util.clip_string(self.test_string, clip_length, self.rider) self.assertEqual( len(result), len(self.test_string), @@ -132,9 +124,7 @@ def test_undersize_string(self): def test_invalid_clip_lengths(self): # Clip values less than or equal to zero are not valid for clip_length in [0, -100]: - result = iris.util.clip_string( - self.test_string, clip_length, self.rider - ) + result = iris.util.clip_string(self.test_string, clip_length, self.rider) self.assertEqual( len(result), len(self.test_string), @@ -150,9 +140,7 @@ def test_default_values(self): self.test_string, arg_dict["clip_length"], arg_dict["rider"] ) - self.assertLess( - len(result), len(self.test_string), "String was not clipped." 
- ) + self.assertLess(len(result), len(self.test_string), "String was not clipped.") rider_returned = result[-len(arg_dict["rider"]) :] self.assertEqual( @@ -165,9 +153,7 @@ def test_trim_string_with_no_spaces(self): # Since this string has no spaces, clip_string will not be able to gracefully clip it # but will instead clip it exactly where the user specified - result = iris.util.clip_string( - no_space_string, clip_length, self.rider - ) + result = iris.util.clip_string(no_space_string, clip_length, self.rider) expected_length = clip_length + len(self.rider) @@ -187,9 +173,7 @@ def test_identical(self): test_cube_b = stock.realistic_4d() return_sio = StringIO() - iris.util.describe_diff( - test_cube_a, test_cube_b, output_file=return_sio - ) + iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() self.assertString(return_str, "compatible_cubes.str.txt") @@ -203,9 +187,7 @@ def test_different(self): test_cube_b.attributes["Conventions"] = "CF-1.6" return_sio = StringIO() - iris.util.describe_diff( - test_cube_a, test_cube_b, output_file=return_sio - ) + iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() self.assertString(return_str, "incompatible_attr.str.txt") @@ -217,9 +199,7 @@ def test_different(self): test_cube_a.standard_name = "relative_humidity" return_sio = StringIO() - iris.util.describe_diff( - test_cube_a, test_cube_b, output_file=return_sio - ) + iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() self.assertString(return_str, "incompatible_name.str.txt") @@ -231,9 +211,7 @@ def test_different(self): test_cube_a.units = cf_units.Unit("m") return_sio = StringIO() - iris.util.describe_diff( - test_cube_a, test_cube_b, output_file=return_sio - ) + iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() self.assertString(return_str, 
"incompatible_unit.str.txt") @@ -245,9 +223,7 @@ def test_different(self): ) return_sio = StringIO() - iris.util.describe_diff( - test_cube_a, test_cube_b, output_file=return_sio - ) + iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() self.assertString(return_str, "incompatible_meth.str.txt") @@ -266,9 +242,7 @@ def test_output_file(self): with self.temp_filename() as filename: with open(filename, "w") as f: - iris.util.describe_diff( - test_cube_a, test_cube_b, output_file=f - ) + iris.util.describe_diff(test_cube_a, test_cube_b, output_file=f) f.close() self.assertFilesEqual(filename, "incompatible_cubes.str.txt") diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index 789426e11b..6b908635ff 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -83,9 +83,7 @@ def check_mdtol(self, mdtol=None): # Prepare: self.assertEqual(prepare.call_count, 1) _, args, kwargs = prepare.mock_calls[0] - self.assertEqual( - self.extract_grid(args[1]), self.extract_grid(target_grid) - ) + self.assertEqual(self.extract_grid(args[1]), self.extract_grid(target_grid)) # Perform: self.assertEqual(perform.call_count, 2) @@ -218,9 +216,7 @@ def test_src_data_different_dims(self): src.add_dim_coord(lat, 1) src.add_dim_coord(lon, 2) result = regridder(src) - self.assertArrayShapeStats( - result, (5, 9, 8), expected_mean, expected_std - ) + self.assertArrayShapeStats(result, (5, 9, 8), expected_mean, expected_std) # Check data with dims in different order # Reshape src so that the coords are ordered [x, z, y], # the mean and std statistics should be the same @@ -230,9 +226,7 @@ def test_src_data_different_dims(self): src.add_dim_coord(levels, 1) src.add_dim_coord(lat, 2) result = regridder(src) - 
self.assertArrayShapeStats( - result, (8, 5, 9), expected_mean, expected_std - ) + self.assertArrayShapeStats(result, (8, 5, 9), expected_mean, expected_std) # Check data with dims in different order # Reshape src so that the coords are ordered [y, x, z], # the mean and std statistics should be the same @@ -242,9 +236,7 @@ def test_src_data_different_dims(self): src.add_dim_coord(lon, 1) src.add_dim_coord(levels, 2) result = regridder(src) - self.assertArrayShapeStats( - result, (9, 8, 5), expected_mean, expected_std - ) + self.assertArrayShapeStats(result, (9, 8, 5), expected_mean, expected_std) @tests.skip_data @@ -252,9 +244,7 @@ class TestLazy(tests.IrisTest): # Setup def setUp(self) -> None: # Prepare a cube and a template - cube_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_xyt.nc"] - ) + cube_file_path = tests.get_data_path(["NetCDF", "regrid", "regrid_xyt.nc"]) self.cube = load_cube(cube_file_path) template_file_path = tests.get_data_path( diff --git a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py index e5f6964e22..cabe5f3361 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py +++ b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py @@ -13,10 +13,7 @@ import cf_units import numpy as np -from iris.analysis.cartography import ( - DEFAULT_SPHERICAL_EARTH_RADIUS, - _quadrant_area, -) +from iris.analysis.cartography import DEFAULT_SPHERICAL_EARTH_RADIUS, _quadrant_area class TestExampleCases(tests.IrisTest): @@ -54,9 +51,7 @@ def test_area_in_north_with_reversed_lats(self): self.assertArrayAllClose(area, [[1228800593851.443115234375]]) def test_area_multiple_lats(self): - lats, lons = self._as_bounded_coords( - [[-80, -70], [0, 10], [70, 80]], [0, 10] - ) + lats, lons = self._as_bounded_coords([[-80, -70], [0, 10], [70, 80]], [0, 10]) area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) self.assertArrayAllClose( 
@@ -112,9 +107,7 @@ def test_too_many_lon_bounds_error(self): self._assert_error_on_malformed_bounds([[0, 10]], [[0, 10, 20]]) def _assert_error_on_malformed_bounds(self, lat_bnds, lon_bnds): - with self.assertRaisesRegex( - ValueError, r"Bounds must be \[n,2\] array" - ): + with self.assertRaisesRegex(ValueError, r"Bounds must be \[n,2\] array"): _quadrant_area(np.array(lat_bnds), np.array(lon_bnds), 1.0) diff --git a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py index eeafc533e4..e218c58ec7 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py +++ b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py @@ -36,9 +36,7 @@ def test_geog_cs_circular(self): assert cube.coord("longitude").circular result = _xy_range(cube) - np.testing.assert_array_almost_equal( - result, ((0, 360), (-90, 90)), decimal=0 - ) + np.testing.assert_array_almost_equal(result, ((0, 360), (-90, 90)), decimal=0) @tests.skip_data def test_geog_cs_regional(self): diff --git a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py index 29c906f0d1..347c40ed6e 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py +++ b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py @@ -18,9 +18,7 @@ def test_latitude_no_units(self): cube.coord("longitude").guess_bounds() cube.coord("latitude").guess_bounds() cube.coord("latitude").units = None - with self.assertRaisesRegex( - ValueError, "Units of degrees or " "radians required" - ): + with self.assertRaisesRegex(ValueError, "Units of degrees or radians required"): iris.analysis.cartography.area_weights(cube) def test_longitude_no_units(self): @@ -28,9 +26,7 @@ def test_longitude_no_units(self): cube.coord("latitude").guess_bounds() cube.coord("longitude").guess_bounds() cube.coord("longitude").units = None - with self.assertRaisesRegex( - ValueError, "Units of 
degrees or " "radians required" - ): + with self.assertRaisesRegex(ValueError, "Units of degrees or radians required"): iris.analysis.cartography.area_weights(cube) diff --git a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py index f3f8c81583..f3ca30d793 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py +++ b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py @@ -78,9 +78,7 @@ class TestGridcellAngles(tests.IrisTest): def setUp(self): # Make a small "normal" contiguous-bounded cube to test on. # This one is regional. - self.standard_regional_cube = sample_2d_latlons( - regional=True, transformed=True - ) + self.standard_regional_cube = sample_2d_latlons(regional=True, transformed=True) # Record the standard correct angle answers. result_cube = gridcell_angles(self.standard_regional_cube) result_cube.convert_units("degrees") @@ -114,9 +112,7 @@ def _check_multiple_orientations_and_latitudes( angles_expected = (angles_expected + 360.0) % 360.0 # Assert (toleranced) equality, and return results. - self.assertArrayAllClose( - angles_calculated, angles_expected, atol=atol_degrees - ) + self.assertArrayAllClose(angles_calculated, angles_expected, atol=atol_degrees) return angles_calculated, angles_expected @@ -127,17 +123,11 @@ def test_result_form(self): # Check properties of the result cube *other than* the data values. 
test_cube = self.standard_regional_cube result_cube = self.standard_result_cube - self.assertEqual( - result_cube.long_name, "gridcell_angle_from_true_east" - ) + self.assertEqual(result_cube.long_name, "gridcell_angle_from_true_east") self.assertEqual(result_cube.units, Unit("degrees")) self.assertEqual(len(result_cube.coords()), 2) - self.assertEqual( - result_cube.coord(axis="x"), test_cube.coord(axis="x") - ) - self.assertEqual( - result_cube.coord(axis="y"), test_cube.coord(axis="y") - ) + self.assertEqual(result_cube.coord(axis="x"), test_cube.coord(axis="x")) + self.assertEqual(result_cube.coord(axis="y"), test_cube.coord(axis="y")) def test_bottom_edge_method(self): # Get results with the "other" calculation method + check to tolerance. @@ -156,17 +146,13 @@ def test_bottom_edge_method(self): def test_bounded_coord_args(self): # Check that passing the coords gives the same result as the cube. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) result = gridcell_angles(co_x, co_y) self.assertArrayAllClose(result.data, self.standard_small_cube_results) def test_coords_radians_args(self): # Check it still works with coords converted to radians. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) for coord in (co_x, co_y): coord.convert_units("radians") result = gridcell_angles(co_x, co_y) @@ -174,9 +160,7 @@ def test_coords_radians_args(self): def test_bounds_array_args(self): # Check we can calculate from bounds values alone. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) # Results drawn from coord bounds should be nearly the same, # but not exactly, because of the different 'midpoint' values. 
result = gridcell_angles(co_x.bounds, co_y.bounds) @@ -186,9 +170,7 @@ def test_bounds_array_args(self): def test_unbounded_regional_coord_args(self): # Remove the coord bounds to check points-based calculation. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) for coord in (co_x, co_y): coord.bounds = None result = gridcell_angles(co_x, co_y) @@ -201,9 +183,7 @@ def test_unbounded_regional_coord_args(self): def test_points_array_args(self): # Check we can calculate from points arrays alone (no coords). - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) # As previous, the leftmost and rightmost columns are not good. result = gridcell_angles(co_x.points, co_y.points) self.assertArrayAllClose( @@ -229,9 +209,7 @@ def test_unbounded_global(self): # In this case, the match is actually rather poor (!). self.assertArrayAllClose(result.data, global_cube_results, atol=7.5) # Leaving off first + last columns again gives a decent result. - self.assertArrayAllClose( - result.data[:, 1:-1], global_cube_results[:, 1:-1] - ) + self.assertArrayAllClose(result.data[:, 1:-1], global_cube_results[:, 1:-1]) # NOTE: although this looks just as bad as 'test_points_array_args', # maximum errors there in the end columns are actually > 100 degrees ! @@ -253,9 +231,7 @@ def test_nonlatlon_coord_system(self): def test_fail_coords_bad_units(self): # Check error with bad coords units. 
- co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) co_y.units = "m" with self.assertRaisesRegex(ValueError, "must have angular units"): gridcell_angles(co_x, co_y) @@ -263,24 +239,18 @@ def test_fail_coords_bad_units(self): def test_fail_nonarraylike(self): # Check error with bad args. co_x, co_y = 1, 2 - with self.assertRaisesRegex( - ValueError, "must have array shape property" - ): + with self.assertRaisesRegex(ValueError, "must have array shape property"): gridcell_angles(co_x, co_y) def test_fail_non2d_coords(self): # Check error with bad args. cube = lat_lon_cube() - with self.assertRaisesRegex( - ValueError, "inputs must have 2-dimensional shape" - ): + with self.assertRaisesRegex(ValueError, "inputs must have 2-dimensional shape"): gridcell_angles(cube) def test_fail_different_shapes(self): # Check error with mismatched shapes. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) co_y = co_y[1:] with self.assertRaisesRegex(ValueError, "must have same shape"): gridcell_angles(co_x, co_y) @@ -289,9 +259,7 @@ def test_fail_different_coord_system(self): # Check error with mismatched coord systems. 
cube = sample_2d_latlons(regional=True, rotated=True) cube.coord(axis="x").coord_system = None - with self.assertRaisesRegex( - ValueError, "must have same coordinate system" - ): + with self.assertRaisesRegex(ValueError, "must have same coordinate system"): gridcell_angles(cube) def test_fail_cube_dims(self): @@ -306,38 +274,24 @@ def test_fail_cube_dims(self): ) cube.remove_coord(co_x) cube.add_aux_coord(co_new_x, (1, 0)) - with self.assertRaisesRegex( - ValueError, "must have the same cube dimensions" - ): + with self.assertRaisesRegex(ValueError, "must have the same cube dimensions"): gridcell_angles(cube) def test_fail_coord_noncoord(self): # Check that passing a coord + an array gives an error. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - with self.assertRaisesRegex( - ValueError, "is a Coordinate, but .* is not" - ): + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) + with self.assertRaisesRegex(ValueError, "is a Coordinate, but .* is not"): gridcell_angles(co_x, co_y.bounds) def test_fail_noncoord_coord(self): # Check that passing an array + a coord gives an error. 
- co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - with self.assertRaisesRegex( - ValueError, "is a Coordinate, but .* is not" - ): + co_x, co_y = (self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y")) + with self.assertRaisesRegex(ValueError, "is a Coordinate, but .* is not"): gridcell_angles(co_x.points, co_y) def test_fail_bad_method(self): - with self.assertRaisesRegex( - ValueError, "unrecognised cell_angle_boundpoints" - ): - self._check_multiple_orientations_and_latitudes( - method="something_unknown" - ) + with self.assertRaisesRegex(ValueError, "unrecognised cell_angle_boundpoints"): + self._check_multiple_orientations_and_latitudes(method="something_unknown") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index 7b52f4492e..35c22af363 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -56,19 +56,11 @@ def setUp(self): def test_is_iris_coord_system(self): res, _ = project(self.cube, self.tcs) - self.assertEqual( - res.coord("projection_y_coordinate").coord_system, self.tcs - ) - self.assertEqual( - res.coord("projection_x_coordinate").coord_system, self.tcs - ) + self.assertEqual(res.coord("projection_y_coordinate").coord_system, self.tcs) + self.assertEqual(res.coord("projection_x_coordinate").coord_system, self.tcs) - self.assertIsNot( - res.coord("projection_y_coordinate").coord_system, self.tcs - ) - self.assertIsNot( - res.coord("projection_x_coordinate").coord_system, self.tcs - ) + self.assertIsNot(res.coord("projection_y_coordinate").coord_system, self.tcs) + self.assertIsNot(res.coord("projection_x_coordinate").coord_system, self.tcs) @tests.skip_data def test_bad_resolution_negative(self): diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py 
b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py index 389dfaeb3a..b694129460 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py @@ -23,9 +23,7 @@ class TestRotateGridVectors(tests.IrisTest): - def _check_angles_calculation( - self, angles_in_degrees=True, nan_angles_mask=None - ): + def _check_angles_calculation(self, angles_in_degrees=True, nan_angles_mask=None): # Check basic maths on a 2d latlon grid. u_cube = sample_2d_latlons(regional=True, transformed=True) u_cube.units = "ms-1" @@ -100,9 +98,7 @@ def test_angles_from_grid(self): v_cube.data[...] = 0.0 # Setup a fake angles result from the inner call to 'gridcell_angles'. - angles_result_data = np.array( - [[0.0, 90.0, 180.0], [-180.0, -90.0, 270.0]] - ) + angles_result_data = np.array([[0.0, 90.0, 180.0], [-180.0, -90.0, 270.0]]) angles_result_cube = Cube(angles_result_data, units="degrees") angles_kwargs = {"this": 2} angles_call_patch = self.patch( @@ -111,13 +107,9 @@ def test_angles_from_grid(self): ) # Call the routine. - result = rotate_grid_vectors( - u_cube, v_cube, grid_angles_kwargs=angles_kwargs - ) + result = rotate_grid_vectors(u_cube, v_cube, grid_angles_kwargs=angles_kwargs) - self.assertEqual( - angles_call_patch.call_args_list, [mock_call(u_cube, this=2)] - ) + self.assertEqual(angles_call_patch.call_args_list, [mock_call(u_cube, this=2)]) out_u, out_v = [cube.data for cube in result] # Records what results should be for the various n*90deg rotations. 
diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index af1a2b8b42..ec69fc0e20 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -36,12 +36,8 @@ def uv_cubes(x=None, y=None): x2d, y2d = np.meshgrid(x, y) u = 10 * (2 * np.cos(2 * np.deg2rad(x2d) + 3 * np.deg2rad(y2d + 30)) ** 2) v = 20 * np.cos(6 * np.deg2rad(x2d)) - lon = DimCoord( - x, standard_name="grid_longitude", units="degrees", coord_system=cs - ) - lat = DimCoord( - y, standard_name="grid_latitude", units="degrees", coord_system=cs - ) + lon = DimCoord(x, standard_name="grid_longitude", units="degrees", coord_system=cs) + lat = DimCoord(y, standard_name="grid_latitude", units="degrees", coord_system=cs) u_cube = Cube(u, standard_name="x_wind", units="m/s") v_cube = Cube(v, standard_name="y_wind", units="m/s") for cube in (u_cube, v_cube): @@ -160,10 +156,7 @@ def _unrotate_equation( phi_angle ) sin_rot = -( - ( - np.sin(np.radians(trueLongitude) - lambda_angle) - * np.sin(phi_angle) - ) + (np.sin(np.radians(trueLongitude) - lambda_angle) * np.sin(phi_angle)) / np.cos(np.radians(rotated_lats)) ) @@ -183,9 +176,7 @@ def _check_rotated_to_true(self, u_rot, v_rot, target_cs, **kwds): pole_lon = cs_rot.grid_north_pole_longitude rotated_lons = u_rot.coord("grid_longitude").points rotated_lats = u_rot.coord("grid_latitude").points - rotated_lons_2d, rotated_lats_2d = np.meshgrid( - rotated_lons, rotated_lats - ) + rotated_lons_2d, rotated_lats_2d = np.meshgrid(rotated_lons, rotated_lats) rotated_u, rotated_v = u_rot.data, v_rot.data u_ref, v_ref = self._unrotate_equation( rotated_lons_2d, @@ -204,18 +195,14 @@ def test_rotated_to_true__small(self): # Check for a small field with varying data. 
target_cs = iris.coord_systems.GeogCS(6371229) u_rot, v_rot = uv_cubes() - self._check_rotated_to_true( - u_rot, v_rot, target_cs, rtol=1e-5, atol=0.0005 - ) + self._check_rotated_to_true(u_rot, v_rot, target_cs, rtol=1e-5, atol=0.0005) def test_rotated_to_true_global(self): # Check for global fields with various constant wind values # - constant in the rotated pole system, that is. # We expect less accuracy where this gets close to the true poles. target_cs = iris.coord_systems.GeogCS(6371229) - u_rot, v_rot = uv_cubes( - x=np.arange(0, 360.0, 15), y=np.arange(-89, 89, 10) - ) + u_rot, v_rot = uv_cubes(x=np.arange(0, 360.0, 15), y=np.arange(-89, 89, 10)) for vector in ((1, 0), (0, 1), (1, 1), (-3, -1.5)): u_rot.data[...] = vector[0] v_rot.data[...] = vector[1] @@ -316,21 +303,11 @@ def test_new_coords_transposed(self): self.assertEqual(ut.coord("projection_y_coordinate"), expected_y) self.assertEqual(vt.coord("projection_y_coordinate"), expected_y) # Check dim mapping for 2d coords is yx. - expected_dims = u.coord_dims("grid_latitude") + u.coord_dims( - "grid_longitude" - ) - self.assertEqual( - ut.coord_dims("projection_x_coordinate"), expected_dims - ) - self.assertEqual( - ut.coord_dims("projection_y_coordinate"), expected_dims - ) - self.assertEqual( - vt.coord_dims("projection_x_coordinate"), expected_dims - ) - self.assertEqual( - vt.coord_dims("projection_y_coordinate"), expected_dims - ) + expected_dims = u.coord_dims("grid_latitude") + u.coord_dims("grid_longitude") + self.assertEqual(ut.coord_dims("projection_x_coordinate"), expected_dims) + self.assertEqual(ut.coord_dims("projection_y_coordinate"), expected_dims) + self.assertEqual(vt.coord_dims("projection_x_coordinate"), expected_dims) + self.assertEqual(vt.coord_dims("projection_y_coordinate"), expected_dims) def test_orig_coords(self): u, v = self._uv_cubes_limited_extent() @@ -479,20 +456,12 @@ def test_rotated_to_unrotated(self): ) # Shift longitude from 0 to 360 -> -180 to 180. 
x2d = np.where(x2d > 180, x2d - 360, x2d) - res_x = res_u.coord( - "projection_x_coordinate", coord_system=orig_cs - ).points - res_y = res_u.coord( - "projection_y_coordinate", coord_system=orig_cs - ).points + res_x = res_u.coord("projection_x_coordinate", coord_system=orig_cs).points + res_y = res_u.coord("projection_y_coordinate", coord_system=orig_cs).points self.assertArrayAlmostEqual(res_x, x2d) self.assertArrayAlmostEqual(res_y, y2d) - res_x = res_v.coord( - "projection_x_coordinate", coord_system=orig_cs - ).points - res_y = res_v.coord( - "projection_y_coordinate", coord_system=orig_cs - ).points + res_x = res_v.coord("projection_x_coordinate", coord_system=orig_cs).points + res_y = res_v.coord("projection_y_coordinate", coord_system=orig_cs).points self.assertArrayAlmostEqual(res_x, x2d) self.assertArrayAlmostEqual(res_y, y2d) diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index b83278c3b0..ed90a42d5a 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -31,9 +31,7 @@ def setUp(self): self.data = np.empty((4, 2, 2)) dim_coords_and_dims = [(y_coord, (1,)), (x_coord, (2,))] self.cube = Cube(self.data, dim_coords_and_dims=dim_coords_and_dims) - self.geometry = shapely.geometry.Polygon( - [(3, 3), (3, 50), (50, 50), (50, 3)] - ) + self.geometry = shapely.geometry.Polygon([(3, 3), (3, 50), (50, 50), (50, 3)]) def test_no_overlap(self): geometry = shapely.geometry.Polygon([(4, 4), (4, 6), (6, 6), (6, 4)]) @@ -42,18 +40,12 @@ def test_no_overlap(self): def test_overlap(self): weights = geometry_area_weights(self.cube, self.geometry) - expected = np.repeat( - [[[0.0, 0.0], [0.0, 1.0]]], self.data.shape[0], axis=0 - ) + expected = np.repeat([[[0.0, 0.0], [0.0, 1.0]]], self.data.shape[0], axis=0) self.assertArrayEqual(weights, expected) def 
test_overlap_normalize(self): - weights = geometry_area_weights( - self.cube, self.geometry, normalize=True - ) - expected = np.repeat( - [[[0.0, 0.0], [0.0, 0.25]]], self.data.shape[0], axis=0 - ) + weights = geometry_area_weights(self.cube, self.geometry, normalize=True) + expected = np.repeat([[[0.0, 0.0], [0.0, 0.25]]], self.data.shape[0], axis=0) self.assertArrayEqual(weights, expected) @tests.skip_data @@ -104,9 +96,7 @@ def test_distinct_xy_bounds(self): miny = 84.99998474121094 maxy = 89.99998474121094 geometry = shapely.geometry.box(minx, miny, maxx, maxy) - geometry_overshoot = shapely.geometry.box( - minx, miny, maxx_overshoot, maxy - ) + geometry_overshoot = shapely.geometry.box(minx, miny, maxx_overshoot, maxy) weights = geometry_area_weights(cube, geometry) weights_overshoot = geometry_area_weights(cube, geometry_overshoot) target = np.array( @@ -145,12 +135,9 @@ def test_distinct_xy_bounds_pole(self): weights = geometry_area_weights(cube, geometry) self.assertEqual( str(w[-1].message), - "The geometry exceeds the " - "cube's y dimension at the upper end.", - ) - self.assertTrue( - issubclass(w[-1].category, IrisGeometryExceedWarning) + "The geometry exceeds the cube's y dimension at the upper end.", ) + self.assertTrue(issubclass(w[-1].category, IrisGeometryExceedWarning)) target = np.array( [ [0, top_cell_half, top_cell_half, 0], diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index 574a25ee7d..7a4553dc0f 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -32,12 +32,8 @@ class ThreeDimCube(tests.IrisTest): def setUp(self): cube = stock.simple_3d_w_multidim_coords() - cube.add_aux_coord( - iris.coords.DimCoord(np.arange(2), "height", units="1"), 0 - ) - cube.add_dim_coord( - iris.coords.DimCoord(np.arange(3), 
"latitude", units="1"), 1 - ) + cube.add_aux_coord(iris.coords.DimCoord(np.arange(2), "height", units="1"), 0) + cube.add_dim_coord(iris.coords.DimCoord(np.arange(3), "latitude", units="1"), 1) cube.add_dim_coord( iris.coords.DimCoord(np.arange(4), "longitude", units="1"), 2 ) @@ -78,9 +74,7 @@ def test_interpolator_overspecified(self): def test_interpolator_overspecified_scalar(self): # Over specification by means of interpolating over one dimension # coordinate and a scalar coordinate (not mapped to a dimension). - self.cube.add_aux_coord( - iris.coords.AuxCoord(1, long_name="scalar"), None - ) + self.cube.add_aux_coord(iris.coords.AuxCoord(1, long_name="scalar"), None) msg = ( "Coordinates repeat a data dimension - " @@ -111,14 +105,9 @@ def test_interpolate_non_monotonic(self): self.cube.add_aux_coord( iris.coords.AuxCoord([0, 3, 2], long_name="non-monotonic"), 1 ) - msg = ( - "Cannot interpolate over the non-monotonic coordinate " - "non-monotonic." - ) + msg = "Cannot interpolate over the non-monotonic coordinate non-monotonic." with self.assertRaisesRegex(ValueError, msg): - RectilinearInterpolator( - self.cube, ["non-monotonic"], LINEAR, EXTRAPOLATE - ) + RectilinearInterpolator(self.cube, ["non-monotonic"], LINEAR, EXTRAPOLATE) class Test___call___1D(ThreeDimCube): @@ -130,9 +119,7 @@ def setUp(self): def test_interpolate_bad_coord_name(self): with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - RectilinearInterpolator( - self.cube, ["doesn't exist"], LINEAR, EXTRAPOLATE - ) + RectilinearInterpolator(self.cube, ["doesn't exist"], LINEAR, EXTRAPOLATE) def test_interpolate_data_single(self): # Single sample point. @@ -261,9 +248,7 @@ class Test___call___1D_circular(ThreeDimCube): # Note: all these test data interpolation. 
def setUp(self): ThreeDimCube.setUp(self) - self.cube.coord("longitude")._points = np.linspace( - 0, 360, 4, endpoint=False - ) + self.cube.coord("longitude")._points = np.linspace(0, 360, 4, endpoint=False) self.cube.coord("longitude").circular = True self.cube.coord("longitude").units = "degrees" self.interpolator = RectilinearInterpolator( @@ -334,15 +319,11 @@ def test_fully_wrapped_twice_reversed_mainpoints(self): def test_fully_wrapped_not_circular(self): cube = stock.lat_lon_cube() - new_long = cube.coord("longitude").copy( - cube.coord("longitude").points + 710 - ) + new_long = cube.coord("longitude").copy(cube.coord("longitude").points + 710) cube.remove_coord("longitude") cube.add_dim_coord(new_long, 1) - interpolator = RectilinearInterpolator( - cube, ["longitude"], LINEAR, EXTRAPOLATE - ) + interpolator = RectilinearInterpolator(cube, ["longitude"], LINEAR, EXTRAPOLATE) res = interpolator([-10]) self.assertArrayEqual(res.data, cube[:, 1].data) @@ -480,9 +461,7 @@ def test_interpolate_data_multiple(self): self.assertArrayEqual(coord_res, coord_expected) def test_orthogonal_cube(self): - result_cube = self.interpolator( - [np.int64([0, 1, 1]), np.int32([0, 1])] - ) + result_cube = self.interpolator([np.int64([0, 1, 1]), np.int32([0, 1])]) result_path = ( "experimental", "analysis", @@ -549,9 +528,7 @@ def interpolator(self, method=LINEAR): time_coord = iris.coords.DimCoord( np.arange(0.0, 48.0, 12.0), "time", units="hours since epoch" ) - height_coord = iris.coords.DimCoord( - np.arange(3), "altitude", units="m" - ) + height_coord = iris.coords.DimCoord(np.arange(3), "altitude", units="m") cube.add_dim_coord(time_coord, 0) cube.add_dim_coord(height_coord, 1) return RectilinearInterpolator(cube, ["time"], method, EXTRAPOLATE) @@ -602,9 +579,7 @@ def test_mixed_numbers_and_datetimes_nearest(self): ] ) self.assertEqual(result.coord("time").points.dtype, float) - self.assertArrayEqual( - result.data, [[3, 4, 5], [3, 4, 5], [6, 7, 8], [6, 7, 8]] - ) + 
self.assertArrayEqual(result.data, [[3, 4, 5], [3, 4, 5], [6, 7, 8], [6, 7, 8]]) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py index c77f8ede37..2206968916 100644 --- a/lib/iris/tests/unit/analysis/maths/__init__.py +++ b/lib/iris/tests/unit/analysis/maths/__init__.py @@ -169,9 +169,7 @@ def test_slice(self): keys[dim] = np.newaxis expected_data = self.data_op(cube.data, other.data[tuple(keys)]) msg = "Problem broadcasting cubes when sliced on dimension {}." - self.assertArrayEqual( - res.data, expected_data, err_msg=msg.format(dim) - ) + self.assertArrayEqual(res.data, expected_data, err_msg=msg.format(dim)) class MathsAddOperationMixin: diff --git a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py index 2c97737973..cbdb4ba821 100644 --- a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py +++ b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py @@ -42,9 +42,7 @@ def setUp(self): self.emsg = "Cannot perform inplace {}".format(self.op) def test_float_cubes(self): - result = _inplace_common_checks( - self.float_cube, self.float_cube, self.op - ) + result = _inplace_common_checks(self.float_cube, self.float_cube, self.op) self.assertIsNone(result) def test_int_cubes(self): @@ -52,21 +50,15 @@ def test_int_cubes(self): self.assertIsNone(result) def test_uint_cubes(self): - result = _inplace_common_checks( - self.uint_cube, self.uint_cube, self.op - ) + result = _inplace_common_checks(self.uint_cube, self.uint_cube, self.op) self.assertIsNone(result) def test_float_cube_int_cube(self): - result = _inplace_common_checks( - self.float_cube, self.int_cube, self.op - ) + result = _inplace_common_checks(self.float_cube, self.int_cube, self.op) self.assertIsNone(result) def test_float_cube_uint_cube(self): - result = _inplace_common_checks( - self.float_cube, 
self.uint_cube, self.op - ) + result = _inplace_common_checks(self.float_cube, self.uint_cube, self.op) self.assertIsNone(result) def test_int_cube_float_cube(self): @@ -78,33 +70,23 @@ def test_uint_cube_float_cube(self): _inplace_common_checks(self.uint_cube, self.float_cube, self.op) def test_float_cube__scalar_int(self): - result = _inplace_common_checks( - self.float_cube, self.scalar_int, self.op - ) + result = _inplace_common_checks(self.float_cube, self.scalar_int, self.op) self.assertIsNone(result) def test_float_cube__scalar_float(self): - result = _inplace_common_checks( - self.float_cube, self.scalar_float, self.op - ) + result = _inplace_common_checks(self.float_cube, self.scalar_float, self.op) self.assertIsNone(result) def test_float_cube__int_array(self): - result = _inplace_common_checks( - self.float_cube, self.int_data, self.op - ) + result = _inplace_common_checks(self.float_cube, self.int_data, self.op) self.assertIsNone(result) def test_float_cube__float_array(self): - result = _inplace_common_checks( - self.float_cube, self.float_data, self.op - ) + result = _inplace_common_checks(self.float_cube, self.float_data, self.op) self.assertIsNone(result) def test_int_cube__scalar_int(self): - result = _inplace_common_checks( - self.int_cube, self.scalar_int, self.op - ) + result = _inplace_common_checks(self.int_cube, self.scalar_int, self.op) self.assertIsNone(result) def test_int_cube_uint_cube(self): @@ -112,9 +94,7 @@ def test_int_cube_uint_cube(self): self.assertIsNone(result) def test_uint_cube_uint_cube(self): - result = _inplace_common_checks( - self.uint_cube, self.uint_cube, self.op - ) + result = _inplace_common_checks(self.uint_cube, self.uint_cube, self.op) self.assertIsNone(result) def test_uint_cube_int_cube(self): diff --git a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py index 3f69118e0f..821f0e8c58 100644 --- 
a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py @@ -103,9 +103,7 @@ def test_same_result(self): self.assertEqual( dtype, result_dtype, - self._binary_error_message( - op, dtype, dtype, dtype, result_dtype - ), + self._binary_error_message(op, dtype, dtype, dtype, result_dtype), ) for op in self.unary_same_result_ops: result_dtype = _output_dtype(op, dtype) @@ -155,9 +153,7 @@ def test_unary_float(self): self.assertEqual( expected_dtype, result_dtype, - self._unary_error_message( - op, dtype, expected_dtype, result_dtype - ), + self._unary_error_message(op, dtype, expected_dtype, result_dtype), ) def test_binary_float_argument(self): diff --git a/lib/iris/tests/unit/analysis/maths/test_add.py b/lib/iris/tests/unit/analysis/maths/test_add.py index 69078b9a96..60d8a7e242 100644 --- a/lib/iris/tests/unit/analysis/maths/test_add.py +++ b/lib/iris/tests/unit/analysis/maths/test_add.py @@ -52,9 +52,7 @@ def test_reversed_points(self): add(cube1, cube2) -class TestMaskedConstant( - tests.IrisTest, CubeArithmeticMaskedConstantTestMixin -): +class TestMaskedConstant(tests.IrisTest, CubeArithmeticMaskedConstantTestMixin): @property def data_op(self): return operator.add diff --git a/lib/iris/tests/unit/analysis/maths/test_multiply.py b/lib/iris/tests/unit/analysis/maths/test_multiply.py index 945a86a4d1..a039ab6ecc 100644 --- a/lib/iris/tests/unit/analysis/maths/test_multiply.py +++ b/lib/iris/tests/unit/analysis/maths/test_multiply.py @@ -52,9 +52,7 @@ def test_reversed_points(self): multiply(cube1, cube2) -class TestMaskedConstant( - tests.IrisTest, CubeArithmeticMaskedConstantTestMixin -): +class TestMaskedConstant(tests.IrisTest, CubeArithmeticMaskedConstantTestMixin): @property def data_op(self): return operator.mul diff --git a/lib/iris/tests/unit/analysis/maths/test_subtract.py b/lib/iris/tests/unit/analysis/maths/test_subtract.py index 6812176412..ee9cb9992b 100644 --- 
a/lib/iris/tests/unit/analysis/maths/test_subtract.py +++ b/lib/iris/tests/unit/analysis/maths/test_subtract.py @@ -52,9 +52,7 @@ def test_reversed_points(self): subtract(cube1, cube2) -class TestMaskedConstant( - tests.IrisTest, CubeArithmeticMaskedConstantTestMixin -): +class TestMaskedConstant(tests.IrisTest, CubeArithmeticMaskedConstantTestMixin): @property def data_op(self): return operator.sub diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index 4855b92332..284d52d3f9 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -242,8 +242,7 @@ class Test__regrid__extrapolation_modes(tests.IrisTest): def setUp(self): self.methods = ("linear", "nearest") self.test_dtypes = [ - np.dtype(spec) - for spec in ("i1", "i2", "i4", "i8", "f2", "f4", "f8") + np.dtype(spec) for spec in ("i1", "i2", "i4", "i8", "f2", "f4", "f8") ] def _regrid(self, data, method, extrapolation_mode=None): @@ -256,9 +255,7 @@ def _regrid(self, data, method, extrapolation_mode=None): kwargs = dict(method=method) if extrapolation_mode is not None: kwargs["extrapolation_mode"] = extrapolation_mode - result = regrid( - data, x_dim, y_dim, x_coord, y_coord, grid_x, grid_y, **kwargs - ) + result = regrid(data, x_dim, y_dim, x_coord, y_coord, grid_x, grid_y, **kwargs) return result def test_default_ndarray(self): @@ -477,9 +474,7 @@ def setUp(self): self.args = ("linear", "mask") self.regridder = Regridder(self.cube, self.cube, *self.args) self.lazy_cube = self.cube.copy(da.asarray(self.cube.data)) - self.lazy_regridder = Regridder( - self.lazy_cube, self.lazy_cube, *self.args - ) + self.lazy_regridder = Regridder(self.lazy_cube, self.lazy_cube, *self.args) def test_lazy_regrid(self): result = self.lazy_regridder(self.lazy_cube) @@ -1290,9 +1285,7 @@ def setUp(self): src.add_aux_coord(level_height) 
src.add_aux_coord(sigma) src.add_aux_coord(surface_altitude, [0, 1]) - hybrid_height = HybridHeightFactory( - level_height, sigma, surface_altitude - ) + hybrid_height = HybridHeightFactory(level_height, sigma, surface_altitude) src.add_aux_factory(hybrid_height) self.src = src @@ -1318,9 +1311,7 @@ def _check_circular_results(self, src_cube, missingmask=""): result = regridder(src_cube) results.append(result) self.assertFalse(result.coord("longitude").circular) - cml = RESULT_DIR + ( - "{}_circular_src{}.cml".format(method, missingmask), - ) + cml = RESULT_DIR + ("{}_circular_src{}.cml".format(method, missingmask),) self.assertCMLApproxData(result, cml) return results @@ -1392,12 +1383,8 @@ def test_circular_src__masked(self): # Make src and dst test cubes. def make_2d_cube(x_points, y_points, data): cube = Cube(data) - y_coord = DimCoord( - y_points, standard_name="latitude", units="degrees" - ) - x_coord = DimCoord( - x_points, standard_name="longitude", units="degrees" - ) + y_coord = DimCoord(y_points, standard_name="latitude", units="degrees") + x_coord = DimCoord(x_points, standard_name="longitude", units="degrees") x_coord.circular = True cube.add_dim_coord(y_coord, 0) cube.add_dim_coord(x_coord, 1) @@ -1429,9 +1416,7 @@ def make_2d_cube(x_points, y_points, data): # masked at the specific expected points. 
expected_result_data = ma.array(result_basic.data) expected_result_data.mask = result_masks[method] - self.assertMaskedArrayEqual( - result_masked.data, expected_result_data - ) + self.assertMaskedArrayEqual(result_masked.data, expected_result_data) def test_circular_grid(self): # Non-circular src -> circular grid diff --git a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py index 16639c1649..1016844a7f 100644 --- a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py @@ -95,12 +95,8 @@ def test_same_src_as_init(self): self.func_operate, return_value=self.dummy_slice_result ) as patch_operate: result = regridder(src_grid) - patch_setup.assert_called_once_with( - src_grid, self.weights, target_grid - ) - patch_operate.assert_called_once_with( - src_grid, mock.sentinel.regrid_info - ) + patch_setup.assert_called_once_with(src_grid, self.weights, target_grid) + patch_operate.assert_called_once_with(src_grid, mock.sentinel.regrid_info) # The result is a re-merged version of the internal result, so it is # therefore '==' but not the same object. self.assertEqual(result, self.dummy_slice_result) @@ -113,9 +109,7 @@ def test_no_weights(self): with mock.patch( self.func_setup, return_value=mock.sentinel.regrid_info ) as patch_setup: - with mock.patch( - self.func_operate, return_value=self.dummy_slice_result - ): + with mock.patch(self.func_operate, return_value=self.dummy_slice_result): _ = regridder(src_grid) patch_setup.assert_called_once_with(src_grid, None, target_grid) @@ -129,9 +123,7 @@ def test_diff_src_from_init(self): different_src_cube = self.src_grid.copy() # Rename so we can distinguish them. 
different_src_cube.rename("Different_source") - with mock.patch( - self.func_setup, return_value=mock.sentinel.regrid_info - ): + with mock.patch(self.func_setup, return_value=mock.sentinel.regrid_info): with mock.patch( self.func_operate, return_value=self.dummy_slice_result ) as patch_operate: @@ -156,9 +148,7 @@ def test_caching(self): ) as patch_operate: _ = regridder(src_grid) _ = regridder(different_src_cube) - patch_setup.assert_called_once_with( - src_grid, self.weights, target_grid - ) + patch_setup.assert_called_once_with(src_grid, self.weights, target_grid) self.assertEqual(len(patch_operate.call_args_list), 2) self.assertEqual( patch_operate.call_args_list, @@ -178,15 +168,9 @@ def setUp(self): src.coord("grid_latitude").points, ) coord_system = src.coord("grid_latitude").coord_system - lat = AuxCoord( - new_lat, standard_name="latitude", coord_system=coord_system - ) - lon = AuxCoord( - new_lon, standard_name="longitude", coord_system=coord_system - ) - lat_t = AuxCoord( - new_lat.T, standard_name="latitude", coord_system=coord_system - ) + lat = AuxCoord(new_lat, standard_name="latitude", coord_system=coord_system) + lon = AuxCoord(new_lon, standard_name="longitude", coord_system=coord_system) + lat_t = AuxCoord(new_lat.T, standard_name="latitude", coord_system=coord_system) lon_t = AuxCoord( new_lon.T, standard_name="longitude", coord_system=coord_system ) @@ -269,9 +253,7 @@ def test_bad_src_type(self): self.regridder(np.ones((3, 4))) def test_bad_src_shape(self): - with self.assertRaisesRegex( - ValueError, "not defined on the same source grid" - ): + with self.assertRaisesRegex(ValueError, "not defined on the same source grid"): self.regridder(self.src_grid[::2, ::2]) @@ -303,9 +285,7 @@ def test_multidim(self): # Define some key points in true-lat/lon that have known positions # First 3x2 points in the centre of each output cell. 
- x_centres, y_centres = np.meshgrid( - grid_x_coord.points, grid_y_coord.points - ) + x_centres, y_centres = np.meshgrid(grid_x_coord.points, grid_y_coord.points) # An extra point also falling in cell 1, 1 x_in11, y_in11 = 26.3, -48.2 # An extra point completely outside the target grid @@ -385,9 +365,7 @@ def test_multidim(self): result.coord("extra_scalar_coord"), src_cube.coord("extra_scalar_coord"), ) - self.assertEqual( - result.coord("longitude"), grid_cube.coord("longitude") - ) + self.assertEqual(result.coord("longitude"), grid_cube.coord("longitude")) self.assertEqual(result.coord("latitude"), grid_cube.coord("latitude")) self.assertMaskedArrayAlmostEqual(result.data, expected_result) diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py index 9bf9621fb4..5ef677ad4a 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py @@ -43,9 +43,7 @@ def setUp(self): py_t = py_0 + 0.7 dyt_0 = self._interpolate_point(py_t, py_0, py_1, d_0, d_1) dyt_1 = self._interpolate_point(py_t, py_0, py_1, d_2, d_3) - self.test_increment = self._interpolate_point( - px_t, px_0, px_1, dyt_0, dyt_1 - ) + self.test_increment = self._interpolate_point(px_t, px_0, px_1, dyt_0, dyt_1) xv, yv = np.meshgrid(newy, newx) self.tgrid = np.dstack((yv, xv)) diff --git a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py index 648aeb8a64..50387e1418 100644 --- a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py +++ b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py @@ -43,9 +43,7 @@ def test_perfect_corr_all_dims(self): def test_incompatible_cubes(self): with self.assertRaises(ValueError): - stats.pearsonr( - self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude" - ) + 
stats.pearsonr(self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude") def test_compatible_cubes(self): r = stats.pearsonr(self.cube_a, self.cube_b, ["latitude", "longitude"]) diff --git a/lib/iris/tests/unit/analysis/test_AreaWeighted.py b/lib/iris/tests/unit/analysis/test_AreaWeighted.py index 1e16e4bcb2..756b55b06b 100644 --- a/lib/iris/tests/unit/analysis/test_AreaWeighted.py +++ b/lib/iris/tests/unit/analysis/test_AreaWeighted.py @@ -28,9 +28,7 @@ def check_call(self, mdtol=None): "iris.analysis.AreaWeightedRegridder", return_value=mock.sentinel.regridder, ) as awr: - regridder = area_weighted.regridder( - mock.sentinel.src, mock.sentinel.target - ) + regridder = area_weighted.regridder(mock.sentinel.src, mock.sentinel.target) awr.assert_called_once_with( mock.sentinel.src, mock.sentinel.target, mdtol=mdtol diff --git a/lib/iris/tests/unit/analysis/test_COUNT.py b/lib/iris/tests/unit/analysis/test_COUNT.py index fa51565474..77722fe637 100644 --- a/lib/iris/tests/unit/analysis/test_COUNT.py +++ b/lib/iris/tests/unit/analysis/test_COUNT.py @@ -41,9 +41,7 @@ def test_not_callable(self): def test_lazy_not_callable(self): with self.assertRaisesRegex(TypeError, "function must be a callable"): - COUNT.lazy_aggregate( - self.lazy_cube.lazy_data(), axis=0, function="wibble" - ) + COUNT.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0, function="wibble") def test_collapse(self): data = COUNT.aggregate(self.cube.data, axis=0, function=self.func) @@ -84,9 +82,7 @@ class Test_lazy_masked(tests.IrisTest): def setUp(self): lazy_data = as_lazy_data(ma.masked_equal([1, 2, 3, 4, 5], 3)) self.lazy_cube = Cube(lazy_data) - self.lazy_cube.add_dim_coord( - DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0 - ) + self.lazy_cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) self.func = lambda x: x >= 3 def test_ma(self): diff --git a/lib/iris/tests/unit/analysis/test_Linear.py b/lib/iris/tests/unit/analysis/test_Linear.py index e98a6f585e..125ee1ad07 100644 --- 
a/lib/iris/tests/unit/analysis/test_Linear.py +++ b/lib/iris/tests/unit/analysis/test_Linear.py @@ -63,9 +63,7 @@ def check_mode(self, mode=None): "iris.analysis.RectilinearInterpolator", return_value=mock.sentinel.interpolator, ) as ri: - interpolator = linear.interpolator( - mock.sentinel.cube, mock.sentinel.coords - ) + interpolator = linear.interpolator(mock.sentinel.cube, mock.sentinel.coords) if mode is None or mode == "linear": expected_mode = "extrapolate" else: @@ -108,9 +106,7 @@ def check_mode(self, mode=None): "iris.analysis.RectilinearRegridder", return_value=mock.sentinel.regridder, ) as lr: - regridder = linear.regridder( - mock.sentinel.src, mock.sentinel.target - ) + regridder = linear.regridder(mock.sentinel.src, mock.sentinel.target) if mode is None or mode == "linear": expected_mode = "extrapolate" else: diff --git a/lib/iris/tests/unit/analysis/test_Nearest.py b/lib/iris/tests/unit/analysis/test_Nearest.py index 053fca1907..910804e4ab 100644 --- a/lib/iris/tests/unit/analysis/test_Nearest.py +++ b/lib/iris/tests/unit/analysis/test_Nearest.py @@ -62,9 +62,7 @@ def check_mode(self, mode=None): "iris.analysis.RectilinearInterpolator", return_value=mock.sentinel.interpolator, ) as ri: - interpolator = scheme.interpolator( - mock.sentinel.cube, mock.sentinel.coords - ) + interpolator = scheme.interpolator(mock.sentinel.cube, mock.sentinel.coords) if mode is None: expected_mode = "extrapolate" else: @@ -104,9 +102,7 @@ def check_mode(self, mode=None): "iris.analysis.RectilinearRegridder", return_value=mock.sentinel.regridder, ) as rr: - regridder = scheme.regridder( - mock.sentinel.src_grid, mock.sentinel.tgt_grid - ) + regridder = scheme.regridder(mock.sentinel.src_grid, mock.sentinel.tgt_grid) expected_mode = "extrapolate" if mode is None else mode rr.assert_called_once_with( diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index d841619ccc..832e2641d9 100644 --- 
a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -172,9 +172,7 @@ def test_chosen_kwargs_passed(self, mocked_mquantiles): if self.lazy: data = as_lazy_data(data) - self.agg_method( - data, axis=axis, percent=percent, alphap=0.6, betap=0.5 - ) + self.agg_method(data, axis=axis, percent=percent, alphap=0.6, betap=0.5) # Trigger calculation for lazy case. as_concrete_data(data) @@ -222,9 +220,7 @@ def test_masked(self): "mdtol is 0." ) with self.assertRaisesRegex(TypeError, emsg): - PERCENTILE.aggregate( - data, axis=0, percent=50, fast_percentile_method=True - ) + PERCENTILE.aggregate(data, axis=0, percent=50, fast_percentile_method=True) def test_masked_mdtol_0(self): # Using (3,11) because np.percentile returns a masked array anyway with @@ -261,9 +257,7 @@ def test_chosen_kwarg_passed(self, mocked_percentile): fast_percentile_method=True, method="nearest", ) - self.assertEqual( - mocked_percentile.call_args.kwargs["method"], "nearest" - ) + self.assertEqual(mocked_percentile.call_args.kwargs["method"], "nearest") class MultiAxisMixin: @@ -381,9 +375,7 @@ def test_chosen_method_kwarg_passed(self, mocked_percentile): self.assertTrue(is_lazy_data(result)) as_concrete_data(result) - self.assertEqual( - mocked_percentile.call_args.kwargs["method"], "nearest" - ) + self.assertEqual(mocked_percentile.call_args.kwargs["method"], "nearest") class Test_lazy_aggregate( diff --git a/lib/iris/tests/unit/analysis/test_PROPORTION.py b/lib/iris/tests/unit/analysis/test_PROPORTION.py index dc890463ae..5a41646f52 100644 --- a/lib/iris/tests/unit/analysis/test_PROPORTION.py +++ b/lib/iris/tests/unit/analysis/test_PROPORTION.py @@ -36,9 +36,7 @@ def test_false_mask(self): # Test corner case where mask is returned as boolean value rather # than boolean array when the mask is unspecified on construction. 
masked_cube = iris.cube.Cube(ma.array([1, 2, 3, 4, 5])) - masked_cube.add_dim_coord( - DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0 - ) + masked_cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) cube = masked_cube.collapsed("foo", PROPORTION, function=self.func) self.assertArrayEqual(cube.data, ma.array([0.6])) diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py index 0137a50019..68e42a401c 100644 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py @@ -59,9 +59,7 @@ def test_simple_single_point(self): kwargs = dict(percent=percent) data = np.empty(self.cube_simple.shape) coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_simple, data, coords, **kwargs) self.assertEqual(actual.shape, self.cube_simple.shape) self.assertIs(actual.data, data) name = "percentile_over_time" @@ -76,9 +74,7 @@ def test_simple_multiple_points(self): shape = self.cube_simple.shape + percent.shape data = np.empty(shape) coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_simple, data, coords, **kwargs) self.assertEqual(actual.shape, percent.shape + self.cube_simple.shape) expected = data.T self.assertArrayEqual(actual.data, expected) @@ -93,9 +89,7 @@ def test_multi_single_point(self): kwargs = dict(percent=percent) data = np.empty(self.cube_multi.shape) coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_multi, data, coords, **kwargs) self.assertEqual(actual.shape, self.cube_multi.shape) self.assertIs(actual.data, data) name = "percentile_over_time" @@ -110,9 +104,7 @@ def test_multi_multiple_points(self): 
shape = self.cube_multi.shape + percent.shape data = np.empty(shape) coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_multi, data, coords, **kwargs) self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) expected = np.moveaxis(data, -1, 0) self.assertArrayEqual(actual.data, expected) @@ -129,9 +121,7 @@ def test_multi_multiple_points_lazy(self): shape = self.cube_multi.shape + percent.shape data = da.arange(np.prod(shape)).reshape(shape) coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_multi, data, coords, **kwargs) self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) self.assertTrue(actual.has_lazy_data()) expected = np.moveaxis(as_concrete_data(data), -1, 0) diff --git a/lib/iris/tests/unit/analysis/test_PointInCell.py b/lib/iris/tests/unit/analysis/test_PointInCell.py index 83453c26d1..471d81261e 100644 --- a/lib/iris/tests/unit/analysis/test_PointInCell.py +++ b/lib/iris/tests/unit/analysis/test_PointInCell.py @@ -21,9 +21,7 @@ def test(self): "iris.analysis.CurvilinearRegridder", return_value=mock.sentinel.regridder, ) as ecr: - regridder = point_in_cell.regridder( - mock.sentinel.src, mock.sentinel.target - ) + regridder = point_in_cell.regridder(mock.sentinel.src, mock.sentinel.target) ecr.assert_called_once_with( mock.sentinel.src, mock.sentinel.target, mock.sentinel.weights diff --git a/lib/iris/tests/unit/analysis/test_RMS.py b/lib/iris/tests/unit/analysis/test_RMS.py index f5da089a9c..48fcf6c334 100644 --- a/lib/iris/tests/unit/analysis/test_RMS.py +++ b/lib/iris/tests/unit/analysis/test_RMS.py @@ -88,9 +88,7 @@ def test_1d(self): def test_2d(self): # 2-dimensional input. 
- data = as_lazy_data( - np.array([[5, 2, 6, 4], [12, 4, 10, 8]], dtype=np.float64) - ) + data = as_lazy_data(np.array([[5, 2, 6, 4], [12, 4, 10, 8]], dtype=np.float64)) expected_rms = np.array([4.5, 9.0], dtype=np.float64) rms = RMS.lazy_aggregate(data, 1) self.assertArrayAlmostEqual(rms, expected_rms) diff --git a/lib/iris/tests/unit/analysis/test_STD_DEV.py b/lib/iris/tests/unit/analysis/test_STD_DEV.py index 0abf4f9dc3..34bda17f85 100644 --- a/lib/iris/tests/unit/analysis/test_STD_DEV.py +++ b/lib/iris/tests/unit/analysis/test_STD_DEV.py @@ -50,9 +50,7 @@ def test_mdtol(self): array = as_lazy_data(array) var = STD_DEV.lazy_aggregate(array, axis=1, mdtol=0.3) masked_result = as_concrete_data(var) - masked_expected = np.ma.masked_array( - [0.57735, 1.0, 0.707107], mask=[0, 0, 1] - ) + masked_expected = np.ma.masked_array([0.57735, 1.0, 0.707107], mask=[0, 0, 1]) self.assertMaskedArrayAlmostEqual(masked_result, masked_expected) def test_ddof_one(self): diff --git a/lib/iris/tests/unit/analysis/test_SUM.py b/lib/iris/tests/unit/analysis/test_SUM.py index 90be890797..389c69de7e 100644 --- a/lib/iris/tests/unit/analysis/test_SUM.py +++ b/lib/iris/tests/unit/analysis/test_SUM.py @@ -92,9 +92,7 @@ def test_weights_and_returned(self): self.assertArrayEqual(weights, [4, 2, 2, 2, 2]) def test_masked_weights_and_returned(self): - array = ma.array( - self.cube_2d.data, mask=[[0, 0, 1, 0, 0], [0, 0, 0, 1, 0]] - ) + array = ma.array(self.cube_2d.data, mask=[[0, 0, 1, 0, 0], [0, 0, 0, 1, 0]]) data, weights = SUM.aggregate( array, axis=0, weights=self.weights, returned=True ) diff --git a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py index c8bcf5018e..a0e6b860ce 100644 --- a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py @@ -16,16 +16,10 @@ class Test_aggregate(tests.IrisTest): def test_missing_mandatory_kwargs(self): - emsg = ( - "weighted_percentile aggregator 
requires " - ".* keyword argument 'percent'" - ) + emsg = "weighted_percentile aggregator requires .* keyword argument 'percent'" with self.assertRaisesRegex(ValueError, emsg): WPERCENTILE.aggregate("dummy", axis=0, weights=None) - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'weights'" - ) + emsg = "weighted_percentile aggregator requires .* keyword argument 'weights'" with self.assertRaisesRegex(ValueError, emsg): WPERCENTILE.aggregate("dummy", axis=0, percent=50) @@ -39,9 +33,7 @@ def test_wrong_weights_shape(self): def test_1d_single(self): data = np.arange(11) weights = np.ones(data.shape) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=50, weights=weights) expected = 5 self.assertTupleEqual(actual.shape, ()) self.assertEqual(actual, expected) @@ -62,9 +54,7 @@ def test_masked_1d_single(self): data = ma.arange(11) weights = np.ones(data.shape) data[3:7] = ma.masked - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=50, weights=weights) expected = 7 self.assertTupleEqual(actual.shape, ()) self.assertEqual(actual, expected) @@ -73,9 +63,7 @@ def test_1d_multi(self): data = np.arange(11) weights = np.ones(data.shape) percent = np.array([20, 50, 90]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=percent, weights=weights) expected = [1.7, 5, 9.4] self.assertTupleEqual(actual.shape, percent.shape) self.assertArrayAlmostEqual(actual, expected) @@ -85,9 +73,7 @@ def test_1d_multi_unequal(self): weights = np.ones(data.shape) weights[1::2] = 3 percent = np.array([20, 50, 96]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=percent, weights=weights) expected = [2.25, 6, 11.75] 
self.assertTupleEqual(actual.shape, percent.shape) self.assertArrayAlmostEqual(actual, expected) @@ -97,9 +83,7 @@ def test_masked_1d_multi(self): weights = np.ones(data.shape) data[3:9] = ma.masked percent = np.array([25, 50, 75]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=percent, weights=weights) expected = [0.75, 2, 9.25] self.assertTupleEqual(actual.shape, percent.shape) self.assertArrayAlmostEqual(actual, expected) @@ -108,9 +92,7 @@ def test_2d_single(self): shape = (2, 11) data = np.arange(np.prod(shape)).reshape(shape) weights = np.ones(shape) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=50, weights=weights) self.assertTupleEqual(actual.shape, shape[-1:]) expected = np.arange(shape[-1]) + 5.5 self.assertArrayEqual(actual, expected) @@ -121,9 +103,7 @@ def test_masked_2d_single(self): data[0, ::2] = ma.masked data[1, 1::2] = ma.masked weights = np.ones(shape) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=50, weights=weights) self.assertTupleEqual(actual.shape, shape[-1:]) expected = np.empty(shape[-1:]) expected[1::2] = data[0, 1::2] @@ -135,9 +115,7 @@ def test_2d_multi(self): data = np.arange(np.prod(shape)).reshape(shape) weights = np.ones(shape) percent = np.array([10, 50, 70, 100]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=percent, weights=weights) self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) expected = np.tile(np.arange(shape[-1]), percent.size).astype("f8") expected = expected.reshape(percent.size, shape[-1]).T @@ -151,9 +129,7 @@ def test_masked_2d_multi(self): weights = np.ones(shape) data[1] = ma.masked percent = np.array([10, 50, 70, 80]) - actual = 
WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) + actual = WPERCENTILE.aggregate(data, axis=0, percent=percent, weights=weights) self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) expected = np.tile(np.arange(shape[-1]), percent.size).astype("f8") expected = expected.reshape(percent.size, shape[-1]).T @@ -191,12 +167,10 @@ def test(self): class Test_aggregate_shape(tests.IrisTest): def test_missing_mandatory_kwarg(self): emsg_pc = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'percent'" + "weighted_percentile aggregator requires .* keyword argument 'percent'" ) emsg_wt = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'weights'" + "weighted_percentile aggregator requires .* keyword argument 'weights'" ) with self.assertRaisesRegex(ValueError, emsg_pc): WPERCENTILE.aggregate_shape(weights=None) diff --git a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py index a1306063b6..cf47f9dffc 100644 --- a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py @@ -55,16 +55,10 @@ def setUp(self): def test_missing_mandatory_kwarg(self): aggregator = WeightedPercentileAggregator() - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'percent'" - ) + emsg = "weighted_percentile aggregator requires .* keyword argument 'percent'" with self.assertRaisesRegex(ValueError, emsg): aggregator.aggregate("dummy", axis=0, weights=None) - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'weights'" - ) + emsg = "weighted_percentile aggregator requires .* keyword argument 'weights'" with self.assertRaisesRegex(ValueError, emsg): aggregator.aggregate("dummy", axis=0, percent=50) @@ -74,9 +68,7 @@ def test_simple_single_point(self): kwargs = dict(percent=percent, weights=self.weights_simple) 
data = np.empty(self.cube_simple.shape) coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_simple, data, coords, **kwargs) self.assertEqual(actual.shape, self.cube_simple.shape) self.assertIs(actual.data, data) name = "weighted_percentile_over_time" @@ -87,9 +79,7 @@ def test_simple_single_point(self): def test_simple_multiple_points(self): aggregator = WeightedPercentileAggregator() percent = np.array([10, 20, 50, 90]) - kwargs = dict( - percent=percent, weights=self.weights_simple, returned=True - ) + kwargs = dict(percent=percent, weights=self.weights_simple, returned=True) shape = self.cube_simple.shape + percent.shape data = np.empty(shape) total_weights = 1.0 @@ -98,9 +88,7 @@ def test_simple_multiple_points(self): self.cube_simple, (data, total_weights), coords, **kwargs ) self.assertEqual(len(actual), 2) - self.assertEqual( - actual[0].shape, percent.shape + self.cube_simple.shape - ) + self.assertEqual(actual[0].shape, percent.shape + self.cube_simple.shape) expected = np.rollaxis(data, -1) self.assertArrayEqual(actual[0].data, expected) self.assertIs(actual[1], total_weights) @@ -115,9 +103,7 @@ def test_multi_single_point(self): kwargs = dict(percent=percent, weights=self.weights_multi) data = np.empty(self.cube_multi.shape) coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_multi, data, coords, **kwargs) self.assertEqual(actual.shape, self.cube_multi.shape) self.assertIs(actual.data, data) name = "weighted_percentile_over_time" @@ -132,9 +118,7 @@ def test_multi_multiple_points(self): shape = self.cube_multi.shape + percent.shape data = np.empty(shape) coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) + actual = aggregator.post_process(self.cube_multi, data, coords, **kwargs) 
self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) expected = np.rollaxis(data, -1) self.assertArrayEqual(actual.data, expected) diff --git a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py index f70c3e7518..5c8afb97b8 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py @@ -14,9 +14,7 @@ import numpy as np -from iris.analysis.trajectory import ( - UnstructuredNearestNeigbourRegridder as unn_gridder, -) +from iris.analysis.trajectory import UnstructuredNearestNeigbourRegridder as unn_gridder from iris.coord_systems import GeogCS, RotatedGeogCS from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList @@ -41,15 +39,11 @@ def setUp(self): # Make sample 1-D source cube. src = Cube(src_x_y_value[:, 2]) src.add_aux_coord( - AuxCoord( - src_x_y_value[:, 0], standard_name="longitude", units="degrees" - ), + AuxCoord(src_x_y_value[:, 0], standard_name="longitude", units="degrees"), 0, ) src.add_aux_coord( - AuxCoord( - src_x_y_value[:, 1], standard_name="latitude", units="degrees" - ), + AuxCoord(src_x_y_value[:, 1], standard_name="latitude", units="degrees"), 0, ) self.src_cube = src @@ -84,9 +78,7 @@ def setUp(self): self.src_z_cube = CubeList(z_cubes).merge_cube() # Make a corresponding 3d expected result. - self.expected_data_zxy = self.src_z_cube.data[ - :, expected_result_indices - ] + self.expected_data_zxy = self.src_z_cube.data[:, expected_result_indices] def _check_expected( self, @@ -290,9 +282,7 @@ def test_transposed_source(self): # last one. 
src_z_cube = self.src_z_cube src_z_cube.transpose((1, 0)) - self._check_expected( - src_cube=src_z_cube, expected_data=self.expected_data_zxy - ) + self._check_expected(src_cube=src_z_cube, expected_data=self.expected_data_zxy) def test_radians_degrees(self): # Check source + target unit conversions, grid and result in degrees. diff --git a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py index d30feecadd..2aef358727 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py +++ b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py @@ -14,9 +14,7 @@ import numpy as np -from iris.analysis.trajectory import ( - _nearest_neighbour_indices_ndcoords as nn_ndinds, -) +from iris.analysis.trajectory import _nearest_neighbour_indices_ndcoords as nn_ndinds from iris.coords import AuxCoord, DimCoord from iris.cube import Cube @@ -43,12 +41,8 @@ def test_nonlatlon_multiple_2d(self): self.assertEqual(result, [(1, 2), (0, 0), (0, 1)]) def test_latlon_simple_2d(self): - co_y = DimCoord( - [10.0, 20.0], standard_name="latitude", units="degrees" - ) - co_x = DimCoord( - [1.0, 2.0, 3.0], standard_name="longitude", units="degrees" - ) + co_y = DimCoord([10.0, 20.0], standard_name="latitude", units="degrees") + co_x = DimCoord([1.0, 2.0, 3.0], standard_name="longitude", units="degrees") cube = Cube(np.zeros((2, 3))) cube.add_dim_coord(co_y, 0) cube.add_dim_coord(co_x, 1) @@ -57,12 +51,8 @@ def test_latlon_simple_2d(self): self.assertEqual(result, [(1, 2)]) def test_latlon_multiple_2d(self): - co_y = DimCoord( - [10.0, 20.0], standard_name="latitude", units="degrees" - ) - co_x = DimCoord( - [1.0, 2.0, 3.0], standard_name="longitude", units="degrees" - ) + co_y = DimCoord([10.0, 20.0], standard_name="latitude", units="degrees") + co_x = DimCoord([1.0, 2.0, 3.0], standard_name="longitude", 
units="degrees") cube = Cube(np.zeros((2, 3))) cube.add_dim_coord(co_y, 0) cube.add_dim_coord(co_x, 1) diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py index c156354f8f..0d28427ddb 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py @@ -70,12 +70,8 @@ def single_point(self, src_cube): ] # Work out cube indices of the testpoint. - single_point_iy = np.where(src_cube.coord("latitude").points == y_val)[ - 0 - ][0] - single_point_ix = np.where( - src_cube.coord("longitude").points == x_val - )[0][0] + single_point_iy = np.where(src_cube.coord("latitude").points == y_val)[0][0] + single_point_ix = np.where(src_cube.coord("longitude").points == x_val)[0][0] point = namedtuple("point", "ix iy sample_point") return point(single_point_ix, single_point_iy, sample_point) @@ -99,18 +95,14 @@ def expected_multipoint_cube(self, src_cube): expected.add_aux_coord(co_x, 1) # Result 'latitude' is now an aux coord containing 4*[0]. expected.remove_coord("latitude") - co_y = AuxCoord( - [0, 0, 0, 0], standard_name="latitude", units="degrees" - ) + co_y = AuxCoord([0, 0, 0, 0], standard_name="latitude", units="degrees") expected.add_aux_coord(co_y, 1) return expected def test_single_point_same_cube(self, src_cube, single_point): # Check exact result matching for a single point. - result = interpolate( - src_cube, single_point.sample_point, method="nearest" - ) + result = interpolate(src_cube, single_point.sample_point, method="nearest") # Check that the result is a single trajectory point, exactly equal to # the expected part of the original data. 
assert result.shape[-1] == 1 @@ -138,9 +130,7 @@ def test_mask_preserved( result = interpolate(src_cube, multi_sample_points, method="nearest") assert result == expected_multipoint_cube - assert np.allclose( - result.data.mask, expected_multipoint_cube.data.mask - ) + assert np.allclose(result.data.mask, expected_multipoint_cube.data.mask) def test_dtype_preserved( self, src_cube, multi_sample_points, expected_multipoint_cube @@ -157,9 +147,7 @@ def test_aux_coord_noninterpolation_dim(self, src_cube, single_point): src_cube.add_aux_coord(DimCoord([17, 19], long_name="aux0"), 0) # The result cube should exactly equal a single source point. - result = interpolate( - src_cube, single_point.sample_point, method="nearest" - ) + result = interpolate(src_cube, single_point.sample_point, method="nearest") assert result.shape[-1] == 1 result = result[..., 0] expected = src_cube[:, single_point.iy, single_point.ix] @@ -167,14 +155,10 @@ def test_aux_coord_noninterpolation_dim(self, src_cube, single_point): def test_aux_coord_one_interp_dim(self, src_cube, single_point): # Check exact result with an aux-coord over one interpolation dims. - src_cube.add_aux_coord( - AuxCoord([11, 12, 13, 14], long_name="aux_x"), 2 - ) + src_cube.add_aux_coord(AuxCoord([11, 12, 13, 14], long_name="aux_x"), 2) # The result cube should exactly equal a single source point. - result = interpolate( - src_cube, single_point.sample_point, method="nearest" - ) + result = interpolate(src_cube, single_point.sample_point, method="nearest") assert result.shape[-1] == 1 result = result[..., 0] expected = src_cube[:, single_point.iy, single_point.ix] @@ -191,9 +175,7 @@ def test_aux_coord_both_interp_dims(self, src_cube, single_point): ) # The result cube should exactly equal a single source point. 
- result = interpolate( - src_cube, single_point.sample_point, method="nearest" - ) + result = interpolate(src_cube, single_point.sample_point, method="nearest") assert result.shape[-1] == 1 result = result[..., 0] expected = src_cube[:, single_point.iy, single_point.ix] @@ -221,9 +203,7 @@ def test_metadata(self, src_cube, single_point): # attributes and cell-methods. src_cube.attributes["ODD_ATTR"] = "string-value-example" src_cube.add_cell_method(iris.coords.CellMethod("mean", "area")) - result = interpolate( - src_cube, single_point.sample_point, method="nearest" - ) + result = interpolate(src_cube, single_point.sample_point, method="nearest") # Check that the result is a single trajectory point, exactly equal to # the expected part of the original data. assert result.shape[-1] == 1 @@ -289,9 +269,7 @@ def test_multi_point_same_cube(self): expected.add_aux_coord(co_x, 1) # Result 'latitude' is now an aux coord containing 4*[0]. expected.remove_coord("latitude") - co_y = AuxCoord( - [0, 0, 0, 0], standard_name="latitude", units="degrees" - ) + co_y = AuxCoord([0, 0, 0, 0], standard_name="latitude", units="degrees") expected.add_aux_coord(co_y, 1) self.assertEqual(result, expected) diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py index 88da4ca463..68bd98e652 100644 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py @@ -169,9 +169,7 @@ def coord_dims(coord): @staticmethod def derive(pressure_at_top, sigma, surface_air_pressure, coord=True): - result = pressure_at_top + sigma * ( - surface_air_pressure - pressure_at_top - ) + result = pressure_at_top + sigma * (surface_air_pressure - pressure_at_top) if coord: name = "air_pressure" result = AuxCoord( @@ -208,15 +206,11 @@ def test_derived_coord(self): # Broadcast expected points given the known dimensional mapping pressure_at_top = 
self.pressure_at_top.points[0] sigma = self.sigma.points[..., np.newaxis, np.newaxis] - surface_air_pressure = self.surface_air_pressure.points[ - np.newaxis, ... - ] + surface_air_pressure = self.surface_air_pressure.points[np.newaxis, ...] # Calculate the expected result - expected_coord = self.derive( - pressure_at_top, sigma, surface_air_pressure - ) + expected_coord = self.derive(pressure_at_top, sigma, surface_air_pressure) # Calculate the actual result factory = AtmosphereSigmaFactory(**self.kwargs) @@ -253,9 +247,7 @@ def test_pressure_at_top(self): self.assertIs(self.factory.pressure_at_top, new_pressure_at_top) def test_pressure_at_top_wrong_shape(self): - new_pressure_at_top = mock.Mock( - units=Unit("Pa"), nbounds=0, shape=(2,) - ) + new_pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(2,)) with self.assertRaises(ValueError): self.factory.update(self.pressure_at_top, new_pressure_at_top) @@ -276,19 +268,13 @@ def test_sigma_incompatible_units(self): def test_surface_air_pressure(self): new_surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) - self.factory.update( - self.surface_air_pressure, new_surface_air_pressure - ) - self.assertIs( - self.factory.surface_air_pressure, new_surface_air_pressure - ) + self.factory.update(self.surface_air_pressure, new_surface_air_pressure) + self.assertIs(self.factory.surface_air_pressure, new_surface_air_pressure) def test_surface_air_pressure_incompatible_units(self): new_surface_air_pressure = mock.Mock(units=Unit("mbar"), nbounds=0) with self.assertRaises(ValueError): - self.factory.update( - self.surface_air_pressure, new_surface_air_pressure - ) + self.factory.update(self.surface_air_pressure, new_surface_air_pressure) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index 619a0482b6..8dbadc4938 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ 
b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -141,13 +141,9 @@ def test_lazy_complex(self): @tests.skip_data class Test_lazy_aux_coords(tests.IrisTest): def setUp(self): - path = tests.get_data_path( - ["NetCDF", "testing", "small_theta_colpex.nc"] - ) + path = tests.get_data_path(["NetCDF", "testing", "small_theta_colpex.nc"]) # While loading, "turn off" loading small variables as real data. - with mock.patch( - "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 - ): + with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): self.cube = iris.load_cube(path, "air_potential_temperature") def _check_lazy(self): diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index 31f791f10e..c3b57e4fe8 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -25,9 +25,7 @@ class Test___init__(tests.IrisTest): def setUp(self): self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock( - units=cf_units.Unit("Pa"), nbounds=0 - ) + self.surface_air_pressure = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) def test_insufficient_coords(self): with self.assertRaises(ValueError): @@ -126,9 +124,7 @@ class Test_dependencies(tests.IrisTest): def setUp(self): self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock( - units=cf_units.Unit("Pa"), nbounds=0 - ) + self.surface_air_pressure = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) def test_value(self): kwargs = dict( @@ -143,18 +139,14 @@ def test_value(self): class Test_make_coord(tests.IrisTest): @staticmethod def coords_dims_func(coord): - mapping = dict( - level_pressure=(0,), sigma=(0,), surface_air_pressure=(1, 2) - ) 
+ mapping = dict(level_pressure=(0,), sigma=(0,), surface_air_pressure=(1, 2)) return mapping[coord.name()] def setUp(self): self.delta = iris.coords.DimCoord( [0.0, 1.0, 2.0], long_name="level_pressure", units="Pa" ) - self.sigma = iris.coords.DimCoord( - [1.0, 0.9, 0.8], long_name="sigma", units="1" - ) + self.sigma = iris.coords.DimCoord([1.0, 0.9, 0.8], long_name="sigma", units="1") self.surface_air_pressure = iris.coords.AuxCoord( np.arange(4).reshape(2, 2), "surface_air_pressure", units="Pa" ) @@ -248,9 +240,7 @@ class Test_update(tests.IrisTest): def setUp(self): self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock( - units=cf_units.Unit("Pa"), nbounds=0 - ) + self.surface_air_pressure = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) self.factory = HybridPressureFactory( delta=self.delta, diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index 4bd85d1e3b..a21e8f3a42 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -168,9 +168,7 @@ def test_values(self): class Test_make_coord(tests.IrisTest): @staticmethod def coord_dims(coord): - mapping = dict( - s=(0,), eta=(1, 2), depth=(1, 2), a=(), b=(), depth_c=() - ) + mapping = dict(s=(0,), eta=(1, 2), depth=(1, 2), a=(), b=(), depth_c=()) return mapping[coord.name()] @staticmethod diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 349b4cfcb6..d6afbcc4c0 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -171,12 +171,8 @@ def derive(s, c, eta, depth, depth_c, coord=True): return result def setUp(self): - self.s = DimCoord( - np.linspace(-0.985, -0.014, 36), units="1", long_name="s" - ) - self.c = 
DimCoord( - np.linspace(-0.959, -0.001, 36), units="1", long_name="c" - ) + self.s = DimCoord(np.linspace(-0.985, -0.014, 36), units="1", long_name="s") + self.c = DimCoord(np.linspace(-0.959, -0.001, 36), units="1", long_name="c") self.eta = AuxCoord( np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index 3304cf121d..8770168fd7 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -171,12 +171,8 @@ def derive(s, c, eta, depth, depth_c, coord=True): return result def setUp(self): - self.s = DimCoord( - np.linspace(-0.985, -0.014, 36), units="1", long_name="s" - ) - self.c = DimCoord( - np.linspace(-0.959, -0.001, 36), units="1", long_name="c" - ) + self.s = DimCoord(np.linspace(-0.985, -0.014, 36), units="1", long_name="s") + self.c = DimCoord(np.linspace(-0.959, -0.001, 36), units="1", long_name="c") self.eta = AuxCoord( np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index a03afa661d..5a1c99a2f1 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -97,9 +97,7 @@ def derive(sigma, eta, depth, coord=True): return result def setUp(self): - self.sigma = DimCoord( - np.linspace(-0.05, -1, 5), long_name="sigma", units="1" - ) + self.sigma = DimCoord(np.linspace(-0.05, -1, 5), long_name="sigma", units="1") self.eta = AuxCoord( np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index a191fac978..fc83312aa0 100644 --- 
a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -259,9 +259,7 @@ def test_derived_points_with_bounds(self): nsigma = self.nsigma.points.reshape(nsigma.shape + (1,)) zlev = self.zlev.bounds.reshape(zlev.shape + (2,)) # Calculate the expected bounds. - bounds = self.derive( - sigma, eta, depth, depth_c, nsigma, zlev, coord=False - ) + bounds = self.derive(sigma, eta, depth, depth_c, nsigma, zlev, coord=False) expected_coord.bounds = bounds # Calculate the actual result. factory = OceanSigmaZFactory(**self.kwargs) diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index 9bff110942..145b8e604b 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -50,9 +50,7 @@ def service2(): services = (service1, service2) lenient = _Lenient(*services) - self.expected.update( - {_qualname(service1): True, _qualname(service2): True} - ) + self.expected.update({_qualname(service1): True, _qualname(service2): True}) self.assertEqual(self.expected, lenient.__dict__) def test_kwargs_client_str(self): @@ -64,9 +62,7 @@ def test_kwargs_client_str(self): def test_kwargs_clients_str(self): clients = dict(client1="service1", client2="service2") lenient = _Lenient(**clients) - self.expected.update( - dict(client1=("service1",), client2=("service2",)) - ) + self.expected.update(dict(client1=("service1",), client2=("service2",))) self.assertEqual(self.expected, lenient.__dict__) def test_kwargs_clients_callable(self): @@ -379,9 +375,7 @@ def service(): qualname_service = _qualname(service) self.lenient.__dict__[qualname_client] = None self.lenient[client] = service - self.assertEqual( - self.lenient.__dict__[qualname_client], (qualname_service,) - ) + self.assertEqual(self.lenient.__dict__[qualname_client], (qualname_service,)) def test_in_value_bool(self): client = "client" @@ 
-398,9 +392,7 @@ def client(): self.lenient.__dict__[qualname_client] = None self.lenient[client] = True self.assertTrue(self.lenient.__dict__[qualname_client]) - self.assertFalse( - isinstance(self.lenient.__dict__[qualname_client], Iterable) - ) + self.assertFalse(isinstance(self.lenient.__dict__[qualname_client], Iterable)) def test_in_value_iterable(self): client = "client" @@ -446,9 +438,7 @@ def service2(): self.lenient.__dict__[qualname_client] = None qualname_services = (_qualname(service1), _qualname(service2)) self.lenient[client] = (service1, service2) - self.assertEqual( - self.lenient.__dict__[qualname_client], qualname_services - ) + self.assertEqual(self.lenient.__dict__[qualname_client], qualname_services) def test_active_iterable(self): active = "active" @@ -476,9 +466,7 @@ def client(): def test_enable(self): enable = "enable" - self.assertEqual( - self.lenient.__dict__[enable], _LENIENT_ENABLE_DEFAULT - ) + self.assertEqual(self.lenient.__dict__[enable], _LENIENT_ENABLE_DEFAULT) self.lenient[enable] = True self.assertTrue(self.lenient.__dict__[enable]) self.lenient[enable] = False @@ -650,9 +638,7 @@ def service(): qualname_service = _qualname(service) self.lenient.register_client(client, service) self.assertIn(qualname_client, self.lenient.__dict__) - self.assertEqual( - self.lenient.__dict__[qualname_client], (qualname_service,) - ) + self.assertEqual(self.lenient.__dict__[qualname_client], (qualname_service,)) def test_callable_services_callable(self): def client(): @@ -668,9 +654,7 @@ def service2(): qualname_services = (_qualname(service1), _qualname(service2)) self.lenient.register_client(client, (service1, service2)) self.assertIn(qualname_client, self.lenient.__dict__) - self.assertEqual( - self.lenient.__dict__[qualname_client], qualname_services - ) + self.assertEqual(self.lenient.__dict__[qualname_client], qualname_services) def test_services_empty(self): emsg = "Require at least one .* client service." 
@@ -692,9 +676,7 @@ def test_services_append(self): self.lenient.__dict__[client] = services self.assertEqual(self.lenient[client], services) new_services = ("service3", "service4") - self.lenient.register_client( - client, services=new_services, append=True - ) + self.lenient.register_client(client, services=new_services, append=True) expected = set(services + new_services) self.assertEqual(set(self.lenient[client]), expected) @@ -719,9 +701,7 @@ def service(): self.assertNotIn(qualname_service, self.lenient.__dict__) self.lenient.register_service(service) self.assertIn(qualname_service, self.lenient.__dict__) - self.assertFalse( - isinstance(self.lenient.__dict__[qualname_service], Iterable) - ) + self.assertFalse(isinstance(self.lenient.__dict__[qualname_service], Iterable)) self.assertTrue(self.lenient.__dict__[qualname_service]) def test_not_protected(self): diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py index b604e49608..6942d577a5 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -40,9 +40,7 @@ def test_args_and_kwargs(self): def func(): pass - emsg = ( - "Invalid lenient client, got both arguments and keyword arguments" - ) + emsg = "Invalid lenient client, got both arguments and keyword arguments" with self.assertRaisesRegex(AssertionError, emsg): _lenient_client(func, services=func) diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py index 6e2eb23bc6..1c7bec0aaa 100644 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ b/lib/iris/tests/unit/common/lenient/test__qualname.py @@ -31,9 +31,7 @@ def test_callable_function_local(self): def myfunc(): pass - qualname_func = self.locals.format( - "test_callable_function_local", "myfunc" - ) + qualname_func = self.locals.format("test_callable_function_local", 
"myfunc") result = _qualname(myfunc) self.assertEqual(result, qualname_func) diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py index 196ab48d20..7444c27f8a 100644 --- a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py @@ -201,9 +201,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) @@ -320,9 +318,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) @@ -389,12 +385,8 @@ def test_op_lenient_different(self): rexpected["units"] = lexpected["units"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -416,12 +408,8 @@ def test_op_strict_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, 
rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_none(self): left = self.values.copy() @@ -435,12 +423,8 @@ def test_op_strict_different_none(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) class Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index e7434922cf..395b724433 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -60,9 +60,7 @@ def test_str(self): units=self.units, attributes={}, ) - expected = ( - f"BaseMetadata(var_name={self.var_name!r}, units={self.units!r})" - ) + expected = f"BaseMetadata(var_name={self.var_name!r}, units={self.units!r})" self.assertEqual(expected, str(metadata)) def test__fields(self): @@ -105,9 +103,7 @@ def test_cannot_compare_different_class(self): def test_lenient(self): return_value = sentinel.return_value - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ) as mlenient: + with mock.patch("iris.common.metadata._LENIENT", return_value=True) as mlenient: with mock.patch.object( self.cls, "_compare_lenient", return_value=return_value ) as mcompare: @@ -176,9 +172,7 @@ def setUp(self): def test_notimplemented(self): return_value = NotImplemented - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: + with mock.patch.object(self.cls, 
"__eq__", return_value=return_value) as mocker: result = self.metadata.__ne__(self.other) self.assertIs(return_value, result) @@ -189,9 +183,7 @@ def test_notimplemented(self): def test_negate_true(self): return_value = True - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: + with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: result = self.metadata.__ne__(self.other) self.assertFalse(result) @@ -201,9 +193,7 @@ def test_negate_true(self): def test_negate_false(self): return_value = False - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: + with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: result = self.metadata.__ne__(self.other) self.assertTrue(result) @@ -227,9 +217,7 @@ def setUp(self): def test_lenient(self): return_value = sentinel._combine_lenient other = sentinel.other - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ) as mlenient: + with mock.patch("iris.common.metadata._LENIENT", return_value=True) as mlenient: with mock.patch.object( self.cls, "_combine_lenient", return_value=return_value ) as mcombine: @@ -746,24 +734,16 @@ def test_different(self): right = self.values.copy() left["two"] = left["four"] = self.dummy - self.assertFalse( - self.metadata._compare_lenient_attributes(left, right) - ) - self.assertFalse( - self.metadata._compare_lenient_attributes(right, left) - ) + self.assertFalse(self.metadata._compare_lenient_attributes(left, right)) + self.assertFalse(self.metadata._compare_lenient_attributes(right, left)) def test_different_none(self): left = self.values.copy() right = self.values.copy() left["one"] = left["three"] = left["five"] = None - self.assertFalse( - self.metadata._compare_lenient_attributes(left, right) - ) - self.assertFalse( - self.metadata._compare_lenient_attributes(right, left) - ) + self.assertFalse(self.metadata._compare_lenient_attributes(left, right)) + 
self.assertFalse(self.metadata._compare_lenient_attributes(right, left)) def test_extra(self): left = self.values.copy() @@ -836,9 +816,7 @@ def setUp(self): def test_lenient(self): return_value = sentinel._difference_lenient other = sentinel.other - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ) as mlenient: + with mock.patch("iris.common.metadata._LENIENT", return_value=True) as mlenient: with mock.patch.object( self.cls, "_difference_lenient", return_value=return_value ) as mdifference: @@ -863,9 +841,7 @@ def test_strict(self): other = self.cls(**values) method = "_difference_strict_attributes" with mock.patch("iris.common.metadata._LENIENT", return_value=False): - with mock.patch.object( - self.cls, method, return_value=None - ) as mdifference: + with mock.patch.object(self.cls, method, return_value=None) as mdifference: result = self.metadata._difference(other) expected = [ @@ -879,9 +855,7 @@ def test_strict(self): self.assertEqual(expected, args) self.assertEqual(dict(), kwargs) - with mock.patch.object( - self.cls, method, return_value=None - ) as mdifference: + with mock.patch.object(self.cls, method, return_value=None) as mdifference: result = other._difference(self.metadata) expected = [ @@ -1461,9 +1435,7 @@ def test_cannot_compare_different_class(self): def test_lenient_default(self): return_value = sentinel.return_value - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: + with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: result = self.metadata.equal(self.metadata) self.assertEqual(return_value, result) diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py index 25b287909d..19a17e087a 100644 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py @@ -247,9 +247,7 @@ def setUp(self): 
self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) @@ -414,9 +412,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) @@ -482,12 +478,8 @@ def test_op_lenient_same_measure_none(self): rexpected["measure"] = (None, sentinel.measure) with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different(self): left = self.values.copy() @@ -501,12 +493,8 @@ def test_op_lenient_different(self): rexpected["units"] = lexpected["units"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different_measure(self): left = self.values.copy() @@ -520,12 +508,8 @@ def test_op_lenient_different_measure(self): rexpected["measure"] = lexpected["measure"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - 
self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -547,12 +531,8 @@ def test_op_strict_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_measure(self): left = self.values.copy() @@ -566,12 +546,8 @@ def test_op_strict_different_measure(self): rexpected["measure"] = lexpected["measure"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_none(self): left = self.values.copy() @@ -585,12 +561,8 @@ def test_op_strict_different_none(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def 
test_op_strict_different_measure_none(self): left = self.values.copy() @@ -604,12 +576,8 @@ def test_op_strict_different_measure_none(self): rexpected["measure"] = lexpected["measure"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) class Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py index dac1f26f35..0e9303a0a4 100644 --- a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py @@ -135,9 +135,7 @@ def test_op_lenient_same_members_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -158,9 +156,7 @@ def test_op_lenient_different_members(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -189,9 +185,7 @@ def test_op_strict_different_members(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -212,9 +206,7 @@ def 
test_op_strict_different_members_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -266,9 +258,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) @@ -333,15 +323,9 @@ def test_op_lenient_same_members_none(self): rmetadata = self.cls(**right) expected = right.copy() - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertTrue( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertTrue( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertTrue(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_lenient_different(self): lmetadata = self.cls(**self.values) @@ -364,15 +348,9 @@ def test_op_lenient_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -404,15 +382,9 @@ def 
test_op_strict_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_different_none(self): lmetadata = self.cls(**self.values) @@ -435,15 +407,9 @@ def test_op_strict_different_members_none(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) class Test_difference(tests.IrisTest): @@ -462,9 +428,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) @@ -531,15 +495,9 @@ def test_op_lenient_same_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = (None, member_value) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with 
mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different(self): left = self.values.copy() @@ -553,12 +511,8 @@ def test_op_lenient_different(self): rexpected["units"] = lexpected["units"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different_members(self): for member in self.cls._members: @@ -572,15 +526,9 @@ def test_op_lenient_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -602,12 +550,8 @@ def test_op_strict_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def 
test_op_strict_different_members(self): for member in self.cls._members: @@ -621,15 +565,9 @@ def test_op_strict_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_none(self): left = self.values.copy() @@ -643,12 +581,8 @@ def test_op_strict_different_none(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members_none(self): for member in self.cls._members: @@ -662,15 +596,9 @@ def test_op_strict_different_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) class Test_equal(tests.IrisTest): diff --git 
a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 4425ba62d7..b27dac282b 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -216,10 +216,7 @@ def extract_result_value(input, extract_global): # For "difference", input.attributes is a *pair* of dictionaries. assert isinstance(result, tuple) result = tuple( - [ - extract_attribute_value(dic, extract_global) - for dic in result - ] + [extract_attribute_value(dic, extract_global) for dic in result] ) if result == (None, None): # This value occurs when the desired attribute is *missing* from a @@ -292,9 +289,7 @@ def check_splitattrs_testcase( test-factors, i.e. global/local attribute type, and right-left order. """ # Just for comfort, check that inputs are all one of a few single characters. - assert all( - (item in list("ABCDX")) for item in (primary_inputs + secondary_inputs) - ) + assert all((item in list("ABCDX")) for item in (primary_inputs + secondary_inputs)) # Interpret "primary" and "secondary" inputs as "global" and "local" attributes. if check_global_not_local: global_values, local_values = primary_inputs, secondary_inputs @@ -326,9 +321,7 @@ def check_splitattrs_testcase( input_l, input_r = input_r, input_l # Run the actual operation - result = getattr(input_l, operation_name)( - input_r, lenient=check_is_lenient - ) + result = getattr(input_l, operation_name)(input_r, lenient=check_is_lenient) if operation_name == "difference" and check_reversed: # Adjust the result of a "reversed" operation to the 'normal' way round. @@ -346,9 +339,7 @@ def check_splitattrs_testcase( if operation_name == "equal" and expected: # Account for the equality cases made `False` by mismatched secondary values. 
left, right = secondary_inputs - secondaries_same = left == right or ( - check_is_lenient and "X" in (left, right) - ) + secondaries_same = left == right or (check_is_lenient and "X" in (left, right)) if not secondaries_same: expected = False @@ -488,9 +479,7 @@ def test_op_same(self, op_leniency): lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check equality both l==r and r==l. assert lmetadata.__eq__(rmetadata) assert rmetadata.__eq__(lmetadata) @@ -513,13 +502,9 @@ def test_op_different__none(self, fieldname, op_leniency): expect_success = is_lenient else: # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) + raise ValueError(f"{self.__name__} unhandled fieldname : {fieldname}") - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check equality both l==r and r==l. assert lmetadata.__eq__(rmetadata) == expect_success assert rmetadata.__eq__(lmetadata) == expect_success @@ -547,13 +532,9 @@ def test_op_different__value(self, fieldname, op_leniency): expect_success = is_lenient else: # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) + raise ValueError(f"{self.__name__} unhandled fieldname : {fieldname}") - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check equality both l==r and r==l. 
assert lmetadata.__eq__(rmetadata) == expect_success assert rmetadata.__eq__(lmetadata) == expect_success @@ -566,9 +547,7 @@ def test_op_different__attribute_extra(self, op_leniency): rmetadata = self.cls(**self.rvalues) # This counts as equal *only* in the lenient case. expect_success = is_lenient - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check equality both l==r and r==l. assert lmetadata.__eq__(rmetadata) == expect_success assert rmetadata.__eq__(lmetadata) == expect_success @@ -580,9 +559,7 @@ def test_op_different__attribute_value(self, op_leniency): self.rvalues["attributes"]["_extra_"] = mock.sentinel.value2 lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # This should ALWAYS fail. 
assert not lmetadata.__eq__(rmetadata) assert not rmetadata.__eq__(lmetadata) @@ -677,9 +654,7 @@ def test_op_same(self, op_leniency): rmetadata = self.cls(**self.rvalues) expected = self.lvalues - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check both l+r and r+l assert lmetadata.combine(rmetadata)._asdict() == expected assert rmetadata.combine(lmetadata)._asdict() == expected @@ -705,9 +680,7 @@ def test_op_different__none(self, fieldname, op_leniency): strict_result = not is_lenient else: # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) + raise ValueError(f"{self.__name__} unhandled fieldname : {fieldname}") if strict_result: # include only those which both have @@ -716,9 +689,7 @@ def test_op_different__none(self, fieldname, op_leniency): # also include those which only 1 has expected = self.lvalues - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check both l+r and r+l assert lmetadata.combine(rmetadata)._asdict() == expected assert rmetadata.combine(lmetadata)._asdict() == expected @@ -740,9 +711,7 @@ def test_op_different__value(self, fieldname, op_leniency): expected = self.lvalues.copy() expected[fieldname] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check both l+r and r+l assert lmetadata.combine(rmetadata)._asdict() == expected assert rmetadata.combine(lmetadata)._asdict() == expected @@ -764,9 +733,7 @@ def test_op_different__attribute_extra(self, op_leniency): # .. 
it should not expected = self.lvalues - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check both l+r and r+l assert lmetadata.combine(rmetadata)._asdict() == expected assert rmetadata.combine(lmetadata)._asdict() == expected @@ -791,9 +758,7 @@ def test_op_different__attribute_value(self, op_leniency): expected = self.lvalues.copy() expected["attributes"] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # Check both l+r and r+l assert lmetadata.combine(rmetadata)._asdict() == expected assert rmetadata.combine(lmetadata)._asdict() == expected @@ -855,9 +820,7 @@ def test_op_same(self, op_leniency): lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): assert lmetadata.difference(rmetadata) is None assert rmetadata.difference(lmetadata) is None @@ -880,14 +843,10 @@ def test_op_different__none(self, fieldname, op_leniency): strict_result = not is_lenient else: # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) + raise ValueError(f"{self.__name__} unhandled fieldname : {fieldname}") if strict_result: - diffentry = tuple( - [getattr(mm, fieldname) for mm in (lmetadata, rmetadata)] - ) + diffentry = tuple([getattr(mm, fieldname) for mm in (lmetadata, rmetadata)]) # NOTE: in these cases, the difference metadata will fail an == operation, # because of the 'None' entries. # But we can use metadata._asdict() and test that. 
@@ -896,9 +855,7 @@ def test_op_different__none(self, fieldname, op_leniency): rexpected = lexpected.copy() rexpected[fieldname] = diffentry[::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): if strict_result: assert lmetadata.difference(rmetadata)._asdict() == lexpected assert rmetadata.difference(lmetadata)._asdict() == rexpected @@ -946,9 +903,7 @@ def test_op_different__attribute_extra(self, op_leniency): rexpected = lexpected.copy() rexpected["attributes"] = diffentry[::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): if is_lenient: # It recognises no difference assert lmetadata.difference(rmetadata) is None @@ -984,9 +939,7 @@ def test_op_different__attribute_value(self, op_leniency): rexpected = lexpected.copy() rexpected["attributes"] = diffentry[::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): # As calculated above -- same for both strict + lenient assert lmetadata.difference(rmetadata)._asdict() == lexpected assert rmetadata.difference(lmetadata)._asdict() == rexpected diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index 1608b1c42e..be7d52c716 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -48,10 +48,7 @@ def _members(self): self.assertEqual(self.names(Metadata.__bases__), expected) expected = ["Metadata", "object"] self.assertEqual(self.names(Metadata.__mro__), expected) - emsg = ( - "Can't instantiate abstract class .* with abstract " - "method.* _members" - ) + emsg = "Can't instantiate abstract class .* with abstract method.* 
_members" with self.assertRaisesRegex(TypeError, emsg): _ = Metadata() diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index 340b6a5355..d00bf95b2b 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -9,11 +9,7 @@ import numpy as np -from iris.common.metadata import ( - CoordMetadata, - DimCoordMetadata, - metadata_filter, -) +from iris.common.metadata import CoordMetadata, DimCoordMetadata, metadata_filter from iris.coords import AuxCoord # Import iris.tests first so that some things can be initialised before @@ -83,16 +79,12 @@ def test_var_name(self): def test_attributes(self): # Confirm that this can handle attrib dicts including np arrays. - attrib_one_two = Mock( - attributes={"one": np.arange(1), "two": np.arange(2)} - ) + attrib_one_two = Mock(attributes={"one": np.arange(1), "two": np.arange(2)}) attrib_three_four = Mock( attributes={"three": np.arange(3), "four": np.arange(4)} ) input_list = [attrib_one_two, attrib_three_four] - result = metadata_filter( - input_list, attributes=attrib_one_two.attributes - ) + result = metadata_filter(input_list, attributes=attrib_one_two.attributes) self.assertIn(attrib_one_two, result) self.assertNotIn(attrib_three_four, result) diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index 1bf342004d..a700585aa2 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -99,9 +99,7 @@ def test__not_is_cls(self): def test__not_values(self): standard_name = mock.sentinel.standard_name - other = metadata_manager_factory( - BaseMetadata, standard_name=standard_name - ) + other = metadata_manager_factory(BaseMetadata, standard_name=standard_name) 
self.assertEqual(other.standard_name, standard_name) self.assertIsNone(other.long_name) self.assertIsNone(other.var_name) @@ -114,9 +112,7 @@ def test__same_default(self): self.assertEqual(self.metadata, other) def test__same(self): - kwargs = dict( - standard_name=1, long_name=2, var_name=3, units=4, attributes=5 - ) + kwargs = dict(standard_name=1, long_name=2, var_name=3, units=4, attributes=5) metadata = metadata_manager_factory(BaseMetadata, **kwargs) other = metadata_manager_factory(BaseMetadata, **kwargs) self.assertEqual(metadata.values._asdict(), kwargs) diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 55d2ca5d79..650524b0a8 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -84,9 +84,7 @@ def setUp(self): def test_standard_name__valid(self): standard_name = "air_temperature" self.item.standard_name = standard_name - self.assertEqual( - self.item._metadata_manager.standard_name, standard_name - ) + self.assertEqual(self.item._metadata_manager.standard_name, standard_name) def test_standard_name__none(self): self.item.standard_name = None @@ -176,9 +174,7 @@ def test_dict(self): metadata = dict(**self.args) self.item.metadata = metadata self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) def test_dict__partial(self): metadata = dict(**self.args) @@ -186,17 +182,13 @@ def test_dict__partial(self): self.item.metadata = metadata metadata["standard_name"] = mock.sentinel.standard_name self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) def 
test_ordereddict(self): metadata = self.args self.item.metadata = metadata self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) def test_ordereddict__partial(self): metadata = self.args @@ -217,9 +209,7 @@ def test_tuple(self): ] ) self.assertEqual(result, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) def test_tuple__missing(self): metadata = list(self.args.values()) @@ -235,12 +225,8 @@ def test_namedtuple(self): ) metadata = Metadata(**self.args) self.item.metadata = metadata - self.assertEqual( - self.item._metadata_manager.values, metadata._asdict() - ) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + self.assertEqual(self.item._metadata_manager.values, metadata._asdict()) + self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) def test_namedtuple__partial(self): Metadata = namedtuple( @@ -256,22 +242,14 @@ def test_namedtuple__partial(self): def test_class_ancillaryvariablemetadata(self): metadata = AncillaryVariableMetadata(**self.args) self.item.metadata = metadata - self.assertEqual( - self.item._metadata_manager.values, metadata._asdict() - ) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + self.assertEqual(self.item._metadata_manager.values, metadata._asdict()) + self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) def test_class_basemetadata(self): metadata = BaseMetadata(**self.args) self.item.metadata = metadata - self.assertEqual( - self.item._metadata_manager.values, metadata._asdict() - ) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + self.assertEqual(self.item._metadata_manager.values, 
metadata._asdict()) + self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) def test_class_cellmeasuremetadata(self): self.args["measure"] = None @@ -280,14 +258,10 @@ def test_class_cellmeasuremetadata(self): expected = metadata._asdict() del expected["measure"] self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) def test_class_connectivitymetadata(self): - self.args.update( - dict(cf_role=None, start_index=None, location_axis=None) - ) + self.args.update(dict(cf_role=None, start_index=None, location_axis=None)) metadata = ConnectivityMetadata(**self.args) self.item.metadata = metadata expected = metadata._asdict() @@ -295,9 +269,7 @@ def test_class_connectivitymetadata(self): del expected["start_index"] del expected["location_axis"] self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) def test_class_coordmetadata(self): self.args.update(dict(coord_system=None, climatological=False)) @@ -307,9 +279,7 @@ def test_class_coordmetadata(self): del expected["coord_system"] del expected["climatological"] self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) def test_class_cubemetadata(self): self.args["cell_methods"] = None @@ -318,9 +288,7 @@ def test_class_cubemetadata(self): expected = metadata._asdict() del expected["cell_methods"] self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) + 
self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) class Test_rename(tests.IrisTest): diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 182cbbd61c..7445045fdb 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -271,9 +271,7 @@ def test_metadata_same(self): def test_metadata_overlap(self): # configure the lhs cube category - category_lhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) + category_lhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords pairs = [ (sentinel.dim_metadata1, sentinel.dims1), @@ -294,9 +292,7 @@ def test_metadata_overlap(self): category_lhs.items_scalar.extend(self._create_items(pairs)) # configure the rhs cube category - category_rhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) + category_rhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords category_rhs.items_dim.append(category_lhs.items_dim[0]) pairs = [(sentinel.dim_metadata200, sentinel.dims2)] @@ -360,9 +356,7 @@ def test_metadata_overlap(self): def test_metadata_different(self): # configure the lhs cube category - category_lhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) + category_lhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords pairs = [ (sentinel.dim_metadata1, sentinel.dims1), @@ -383,9 +377,7 @@ def test_metadata_different(self): category_lhs.items_scalar.extend(self._create_items(pairs)) # configure the rhs cube category - category_rhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) + category_rhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords pairs = [ (sentinel.dim_metadata100, sentinel.dims1), @@ -674,12 +666,8 @@ def test(self): self.resolve._metadata_coverage() 
self.assertEqual(2, self.mocker_dim_coverage.call_count) calls = [ - mock.call( - self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata] - ), - mock.call( - self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata] - ), + mock.call(self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata]), + mock.call(self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata]), ] self.assertEqual(calls, self.mocker_dim_coverage.call_args_list) self.assertEqual(2, self.mocker_aux_coverage.call_count) @@ -813,15 +801,9 @@ def setUp(self): dims_free=None, ) self.items = [ - _Item( - metadata=sentinel.metadata0, coord=sentinel.coord0, dims=[0] - ), - _Item( - metadata=sentinel.metadata1, coord=sentinel.coord1, dims=[1] - ), - _Item( - metadata=sentinel.metadata2, coord=sentinel.coord2, dims=[2] - ), + _Item(metadata=sentinel.metadata0, coord=sentinel.coord0, dims=[0]), + _Item(metadata=sentinel.metadata1, coord=sentinel.coord1, dims=[1]), + _Item(metadata=sentinel.metadata2, coord=sentinel.coord2, dims=[2]), ] def test_no_mapping(self): @@ -1563,13 +1545,9 @@ def _make_cube(self, name, shape, transpose_shape=None): self.args["metadata"] = sentinel.metadata self.reshape = sentinel.reshape m_reshape = mock.Mock(return_value=self.reshape) - self.transpose = mock.Mock( - shape=transpose_shape, reshape=m_reshape - ) + self.transpose = mock.Mock(shape=transpose_shape, reshape=m_reshape) m_transpose = mock.Mock(return_value=self.transpose) - self.data = mock.Mock( - shape=shape, transpose=m_transpose, reshape=m_reshape - ) + self.data = mock.Mock(shape=shape, transpose=m_transpose, reshape=m_reshape) m_copy = mock.Mock(return_value=self.data) m_core_data = mock.Mock(copy=m_copy) self.args["core_data"] = mock.Mock(return_value=m_core_data) @@ -1637,9 +1615,7 @@ def test_incompatible_shapes__fail_broadcast(self): self.resolve._as_compatible_cubes() def _check_compatible(self, broadcast_shape): - self.assertEqual( - self.resolve.lhs_cube, self.resolve._tgt_cube_resolved - 
) + self.assertEqual(self.resolve.lhs_cube, self.resolve._tgt_cube_resolved) self.assertEqual(self.cube, self.resolve._src_cube_resolved) self.assertEqual(broadcast_shape, self.resolve._broadcast_shape) self.assertEqual(1, self.mocker.call_count) @@ -1708,12 +1684,8 @@ def test_compatible__transpose(self): self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=tgt_shape) self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual( - [mock.call([2, 1, 0])], self.data.transpose.call_args_list - ) - self.assertEqual( - [mock.call(self.transpose)], self.mocker.call_args_list - ) + self.assertEqual([mock.call([2, 1, 0])], self.data.transpose.call_args_list) + self.assertEqual([mock.call(self.transpose)], self.mocker.call_args_list) def test_compatible__reshape(self): # key: (state) c=common, f=free @@ -1763,9 +1735,7 @@ def test_compatible__transpose_reshape(self): self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=tgt_shape) self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual( - [mock.call([2, 1, 0])], self.data.transpose.call_args_list - ) + self.assertEqual([mock.call([2, 1, 0])], self.data.transpose.call_args_list) self.assertEqual(1, self.data.reshape.call_count) self.assertEqual( [mock.call((1,) + transpose_shape)], @@ -1819,9 +1789,7 @@ def test_compatible__broadcast_transpose_reshape(self): self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=(5, 4, 3, 2)) self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual( - [mock.call([2, 1, 0])], self.data.transpose.call_args_list - ) + self.assertEqual([mock.call([2, 1, 0])], self.data.transpose.call_args_list) self.assertEqual(1, self.data.reshape.call_count) self.assertEqual( [mock.call((1,) + transpose_shape)], @@ -1859,9 +1827,7 @@ def setUp(self): self.m_aux_mapping = self.patch( "iris.common.resolve.Resolve._aux_mapping", return_value={} ) - self.m_free_mapping = self.patch( - 
"iris.common.resolve.Resolve._free_mapping" - ) + self.m_free_mapping = self.patch("iris.common.resolve.Resolve._free_mapping") self.m_as_compatible_cubes = self.patch( "iris.common.resolve.Resolve._as_compatible_cubes" ) @@ -1959,9 +1925,7 @@ def test_mapped__dim_coords_and_free_dims(self): free_mapping = {0: 1} self.src_cube.ndim = 3 self.m_dim_mapping.return_value = dim_mapping - side_effect = lambda a, b, c, d: self.resolve.mapping.update( - free_mapping - ) + side_effect = lambda a, b, c, d: self.resolve.mapping.update(free_mapping) self.m_free_mapping.side_effect = side_effect self.resolve._metadata_mapping() self.assertEqual(self.mapping, self.resolve.mapping) @@ -2031,9 +1995,7 @@ def test_mapped__dim_coords_free_flip_with_free_flip(self): self.src_cube.ndim = 3 self.tgt_cube.ndim = 3 self.m_dim_mapping.return_value = dim_mapping - side_effect = lambda a, b, c, d: self.resolve.mapping.update( - free_mapping - ) + side_effect = lambda a, b, c, d: self.resolve.mapping.update(free_mapping) self.m_free_mapping.side_effect = side_effect self.tgt_dim_coverage.dims_free = [0, 1] self.tgt_aux_coverage.dims_free = [0, 1] @@ -2202,9 +2164,7 @@ def _check(self, ignore_mismatch=None, bad_points=None): container=self.container, ), ] - self.assertEqual( - expected, self.resolve.prepared_category.items_dim - ) + self.assertEqual(expected, self.resolve.prepared_category.items_dim) else: self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) @@ -2233,17 +2193,11 @@ def _check(self, ignore_mismatch=None, bad_points=None): ignore_mismatch=ignore_mismatch, ), ] - self.assertEqual( - expected, self.m_prepare_points_and_bounds.call_args_list - ) + self.assertEqual(expected, self.m_prepare_points_and_bounds.call_args_list) if not bad_points: self.assertEqual(3, self.src_metadata.combine.call_count) - expected = [ - mock.call(metadata) for metadata in self.tgt_metadata[1:] - ] - self.assertEqual( - 
expected, self.src_metadata.combine.call_args_list - ) + expected = [mock.call(metadata) for metadata in self.tgt_metadata[1:]] + self.assertEqual(expected, self.src_metadata.combine.call_args_list) def test__default_ignore_mismatch(self): self._check() @@ -2310,15 +2264,9 @@ def setUp(self): sentinel.combined_2, ) self.src_metadata = [ - mock.Mock( - combine=mock.Mock(return_value=self.metadata_combined[0]) - ), - mock.Mock( - combine=mock.Mock(return_value=self.metadata_combined[1]) - ), - mock.Mock( - combine=mock.Mock(return_value=self.metadata_combined[2]) - ), + mock.Mock(combine=mock.Mock(return_value=self.metadata_combined[0])), + mock.Mock(combine=mock.Mock(return_value=self.metadata_combined[1])), + mock.Mock(combine=mock.Mock(return_value=self.metadata_combined[2])), ] self.src_coords = [ # N.B. these need to mimic a Coord with points and bounds, but also @@ -2428,9 +2376,7 @@ def _check(self, ignore_mismatch=None, bad_points=None): ignore_mismatch=ignore_mismatch, ), ] - self.assertEqual( - expected, self.m_prepare_points_and_bounds.call_args_list - ) + self.assertEqual(expected, self.m_prepare_points_and_bounds.call_args_list) if not bad_points: for src_metadata, tgt_metadata in zip( self.src_metadata, self.tgt_metadata[1:] @@ -2655,9 +2601,7 @@ def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting( src_coord, tgt_coord, src_dims, tgt_dims ) - def _populate( - self, src_points, tgt_points, src_bounds=None, tgt_bounds=None - ): + def _populate(self, src_points, tgt_points, src_bounds=None, tgt_bounds=None): # key: (state) c=common, f=free # (coord) x=coord # @@ -2673,12 +2617,8 @@ def _populate( mapping = {0: 0, 1: 1} self.resolve.mapping = mapping self.resolve.map_rhs_to_lhs = True - self.resolve.rhs_cube = self.Cube( - name=lambda: sentinel.src_cube, shape=None - ) - self.resolve.lhs_cube = self.Cube( - name=lambda: sentinel.tgt_cube, shape=None - ) + self.resolve.rhs_cube = self.Cube(name=lambda: sentinel.src_cube, shape=None) + 
self.resolve.lhs_cube = self.Cube(name=lambda: sentinel.tgt_cube, shape=None) ndim = 1 src_dims = 1 self.src_items["ndim"] = ndim @@ -3040,9 +2980,7 @@ def test_src_local_with_tgt_free(self): self.prepared_item, self.resolve.prepared_category.items_dim[0] ) self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [ - mock.call(src_coord, mapping[src_dim], src_metadata=src_metadata) - ] + expected = [mock.call(src_coord, mapping[src_dim], src_metadata=src_metadata)] self.assertEqual(expected, self.m_create_prepared_item.call_args_list) def test_src_local_with_tgt_free__strict(self): @@ -3553,9 +3491,7 @@ def test_src_no_local_with_tgt_no_local__strict(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) def test_src_no_local_with_tgt_no_local__src_scalar_cube(self): @@ -3572,9 +3508,7 @@ def test_src_no_local_with_tgt_no_local__src_scalar_cube_strict(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) def test_src_local_with_tgt_no_local(self): @@ -3582,9 +3516,7 @@ def test_src_local_with_tgt_no_local(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) 
src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) @@ -3592,9 +3524,7 @@ def test_src_local_with_tgt_no_local(self): self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) expected = [self.src_prepared_item] self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata) - ] + expected = [mock.call(src_coord, self.src_dims, src_metadata=src_metadata)] self.assertEqual(expected, self.m_create_prepared_item.call_args_list) def test_src_local_with_tgt_no_local__strict(self): @@ -3602,16 +3532,12 @@ def test_src_local_with_tgt_no_local__strict(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) def test_src_local_with_tgt_no_local__src_scalar_cube(self): @@ -3619,9 +3545,7 @@ def test_src_local_with_tgt_no_local__src_scalar_cube(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) @@ -3629,9 +3553,7 @@ 
def test_src_local_with_tgt_no_local__src_scalar_cube(self): self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) expected = [self.src_prepared_item] self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata) - ] + expected = [mock.call(src_coord, self.src_dims, src_metadata=src_metadata)] self.assertEqual(expected, self.m_create_prepared_item.call_args_list) def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self): @@ -3639,16 +3561,12 @@ def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) def test_src_no_local_with_tgt_local(self): @@ -3658,18 +3576,14 @@ def test_src_no_local_with_tgt_local(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) expected = 
[self.tgt_prepared_item] self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) - ] + expected = [mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] self.assertEqual(expected, self.m_create_prepared_item.call_args_list) def test_src_no_local_with_tgt_local__strict(self): @@ -3679,15 +3593,11 @@ def test_src_no_local_with_tgt_local__strict(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) def test_src_no_local_with_tgt_local__src_scalar_cube(self): @@ -3697,18 +3607,14 @@ def test_src_no_local_with_tgt_local__src_scalar_cube(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) expected = [self.tgt_prepared_item] self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) - ] + expected = 
[mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] self.assertEqual(expected, self.m_create_prepared_item.call_args_list) def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self): @@ -3718,21 +3624,15 @@ def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) expected = [self.tgt_prepared_item] self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) - ] + expected = [mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] self.assertEqual(expected, self.m_create_prepared_item.call_args_list) def test_src_local_with_tgt_local(self): @@ -3740,16 +3640,12 @@ def test_src_local_with_tgt_local(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, 
dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) @@ -3767,22 +3663,16 @@ def test_src_local_with_tgt_local__strict(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) def test_src_local_with_tgt_local__src_scalar_cube(self): @@ -3790,16 +3680,12 @@ def test_src_local_with_tgt_local__src_scalar_cube(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, 
dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) @@ -3817,22 +3703,16 @@ def test_src_local_with_tgt_local__src_scalar_cube_strict(self): self.src_coverage["cube"] = self.Cube(ndim=ndim) src_metadata = sentinel.src_metadata src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) + src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_metadata = sentinel.tgt_metadata tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) + tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) + self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) @@ -3888,12 +3768,8 @@ def setUp(self): self.m_prepare_common_aux_payload = self.patch( f"{root}._prepare_common_aux_payload" ) - self.m_prepare_local_payload = self.patch( - f"{root}._prepare_local_payload" - ) - self.m_prepare_factory_payload = self.patch( - f"{root}._prepare_factory_payload" - ) + self.m_prepare_local_payload = self.patch(f"{root}._prepare_local_payload") + self.m_prepare_factory_payload = self.patch(f"{root}._prepare_factory_payload") def _check(self): self.assertIsNone(self.resolve.prepared_category) @@ -3904,9 +3780,7 @@ def _check(self): self.assertEqual([], self.resolve.prepared_factories) self.assertEqual(1, self.m_prepare_common_dim_payload.call_count) expected = 
[mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual( - expected, self.m_prepare_common_dim_payload.call_args_list - ) + self.assertEqual(expected, self.m_prepare_common_dim_payload.call_args_list) self.assertEqual(2, self.m_prepare_common_aux_payload.call_count) expected = [ mock.call( @@ -3921,9 +3795,7 @@ def _check(self): ignore_mismatch=True, ), ] - self.assertEqual( - expected, self.m_prepare_common_aux_payload.call_args_list - ) + self.assertEqual(expected, self.m_prepare_common_aux_payload.call_args_list) self.assertEqual(1, self.m_prepare_local_payload.call_count) expected = [ mock.call( @@ -3939,9 +3811,7 @@ def _check(self): mock.call(self.tgt_cube, self.tgt_category_local, from_src=False), mock.call(self.src_cube, self.src_category_local), ] - self.assertEqual( - expected, self.m_prepare_factory_payload.call_args_list - ) + self.assertEqual(expected, self.m_prepare_factory_payload.call_args_list) def test_map_rhs_to_lhs__true(self): self.resolve.map_rhs_to_lhs = True @@ -4031,15 +3901,9 @@ def test_factory__dependency_already_prepared(self): self.assertEqual(expected, self.resolve.prepared_factories) self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) expected = [ - mock.call( - coord_a.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_b.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_c.metadata, self.category_local, from_src=self.from_src - ), + mock.call(coord_a.metadata, self.category_local, from_src=self.from_src), + mock.call(coord_b.metadata, self.category_local, from_src=self.from_src), + mock.call(coord_c.metadata, self.category_local, from_src=self.from_src), ] actual = self.m_get_prepared_item.call_args_list for call in expected: @@ -4070,15 +3934,9 @@ def test_factory__dependency_local_not_prepared(self): self.assertEqual(expected, self.resolve.prepared_factories) self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) 
expected = [ - mock.call( - coord_a.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_b.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_c.metadata, self.category_local, from_src=self.from_src - ), + mock.call(coord_a.metadata, self.category_local, from_src=self.from_src), + mock.call(coord_b.metadata, self.category_local, from_src=self.from_src), + mock.call(coord_c.metadata, self.category_local, from_src=self.from_src), mock.call( coord_a.metadata, self.category_local, @@ -4118,15 +3976,9 @@ def test_factory__dependency_not_found(self): self.assertEqual(0, len(self.resolve.prepared_factories)) self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) expected = [ - mock.call( - coord_a.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_b.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_c.metadata, self.category_local, from_src=self.from_src - ), + mock.call(coord_a.metadata, self.category_local, from_src=self.from_src), + mock.call(coord_b.metadata, self.category_local, from_src=self.from_src), + mock.call(coord_c.metadata, self.category_local, from_src=self.from_src), mock.call( coord_a.metadata, self.category_local, @@ -4173,12 +4025,8 @@ def setUp(self): tgt=self.prepared_aux_metadata_tgt, ) ) - self.prepared_scalar_metadata_src = ( - sentinel.prepared_scalar_metadata_src - ) - self.prepared_scalar_metadata_tgt = ( - sentinel.prepared_scalar_metadata_tgt - ) + self.prepared_scalar_metadata_src = sentinel.prepared_scalar_metadata_src + self.prepared_scalar_metadata_tgt = sentinel.prepared_scalar_metadata_tgt self.prepared_items_scalar = PreparedItem( metadata=_PreparedMetadata( combined=None, @@ -4393,9 +4241,7 @@ def test_get_local_scalar_coord__from_src(self): expected = created_local_item self.assertEqual(expected, result) self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - self.assertEqual( - expected, 
self.resolve.prepared_category.items_scalar[1] - ) + self.assertEqual(expected, self.resolve.prepared_category.items_scalar[1]) self.assertEqual(1, self.m_create_prepared_item.call_count) dims = (self.resolve.mapping[self.local_coord_dims[0]],) expected = [ @@ -4418,9 +4264,7 @@ def test_get_local_scalar_coord__from_tgt(self): expected = created_local_item self.assertEqual(expected, result) self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - self.assertEqual( - expected, self.resolve.prepared_category.items_scalar[1] - ) + self.assertEqual(expected, self.resolve.prepared_category.items_scalar[1]) self.assertEqual(1, self.m_create_prepared_item.call_count) dims = self.local_coord_dims expected = [ @@ -4465,9 +4309,7 @@ def setUp(self): # # prepared coordinates # - prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) + prepared_category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # prepared dim coordinates self.prepared_dim_0_metadata = _PreparedMetadata( combined=sentinel.prepared_dim_0_metadata_combined, @@ -4598,9 +4440,7 @@ def setUp(self): # prepared_factories = [] self.aux_factory = sentinel.aux_factory - self.prepared_factory_container = mock.Mock( - return_value=self.aux_factory - ) + self.prepared_factory_container = mock.Mock(return_value=self.aux_factory) self.prepared_factory_metadata_a = _PreparedMetadata( combined=sentinel.prepared_factory_metadata_a_combined, src=None, @@ -4636,9 +4476,7 @@ def setUp(self): self.resolve.prepared_factories = prepared_factories # Required to stop mock 'containers' failing in an 'issubclass' call. 
- self.patch( - "iris.common.resolve.issubclass", mock.Mock(return_value=False) - ) + self.patch("iris.common.resolve.issubclass", mock.Mock(return_value=False)) def test_no_resolved_shape(self): self.resolve._broadcast_shape = None @@ -4662,13 +4500,9 @@ def _check(self): # check dim coordinate 0 self.assertEqual(1, self.prepared_dim_0.container.call_count) expected = [ - mock.call( - self.prepared_dim_0_points, bounds=self.prepared_dim_0_bounds - ) + mock.call(self.prepared_dim_0_points, bounds=self.prepared_dim_0_bounds) ] - self.assertEqual( - expected, self.prepared_dim_0.container.call_args_list - ) + self.assertEqual(expected, self.prepared_dim_0.container.call_args_list) self.assertEqual( self.prepared_dim_0_coord.metadata, self.prepared_dim_0_metadata.combined, @@ -4676,13 +4510,9 @@ def _check(self): # check dim coordinate 1 self.assertEqual(1, self.prepared_dim_1.container.call_count) expected = [ - mock.call( - self.prepared_dim_1_points, bounds=self.prepared_dim_1_bounds - ) + mock.call(self.prepared_dim_1_points, bounds=self.prepared_dim_1_bounds) ] - self.assertEqual( - expected, self.prepared_dim_1.container.call_args_list - ) + self.assertEqual(expected, self.prepared_dim_1.container.call_args_list) self.assertEqual( self.prepared_dim_1_coord.metadata, self.prepared_dim_1_metadata.combined, @@ -4698,13 +4528,9 @@ def _check(self): # check aux coordinate 0 self.assertEqual(1, self.prepared_aux_0.container.call_count) expected = [ - mock.call( - self.prepared_aux_0_points, bounds=self.prepared_aux_0_bounds - ) + mock.call(self.prepared_aux_0_points, bounds=self.prepared_aux_0_bounds) ] - self.assertEqual( - expected, self.prepared_aux_0.container.call_args_list - ) + self.assertEqual(expected, self.prepared_aux_0.container.call_args_list) self.assertEqual( self.prepared_aux_0_coord.metadata, self.prepared_aux_0_metadata.combined, @@ -4712,13 +4538,9 @@ def _check(self): # check aux coordinate 1 self.assertEqual(1, 
self.prepared_aux_1.container.call_count) expected = [ - mock.call( - self.prepared_aux_1_points, bounds=self.prepared_aux_1_bounds - ) + mock.call(self.prepared_aux_1_points, bounds=self.prepared_aux_1_bounds) ] - self.assertEqual( - expected, self.prepared_aux_1.container.call_args_list - ) + self.assertEqual(expected, self.prepared_aux_1.container.call_args_list) self.assertEqual( self.prepared_aux_1_coord.metadata, self.prepared_aux_1_metadata.combined, @@ -4731,9 +4553,7 @@ def _check(self): bounds=self.prepared_scalar_0_bounds, ) ] - self.assertEqual( - expected, self.prepared_scalar_0.container.call_args_list - ) + self.assertEqual(expected, self.prepared_scalar_0.container.call_args_list) self.assertEqual( self.prepared_scalar_0_coord.metadata, self.prepared_scalar_0_metadata.combined, @@ -4746,9 +4566,7 @@ def _check(self): bounds=self.prepared_scalar_1_bounds, ) ] - self.assertEqual( - expected, self.prepared_scalar_1.container.call_args_list - ) + self.assertEqual(expected, self.prepared_scalar_1.container.call_args_list) self.assertEqual( self.prepared_scalar_1_coord.metadata, self.prepared_scalar_1_metadata.combined, @@ -4758,12 +4576,8 @@ def _check(self): expected = [ mock.call(self.prepared_aux_0_coord, self.prepared_aux_0_dims), mock.call(self.prepared_aux_1_coord, self.prepared_aux_1_dims), - mock.call( - self.prepared_scalar_0_coord, self.prepared_scalar_0_dims - ), - mock.call( - self.prepared_scalar_1_coord, self.prepared_scalar_1_dims - ), + mock.call(self.prepared_scalar_0_coord, self.prepared_scalar_0_dims), + mock.call(self.prepared_scalar_1_coord, self.prepared_scalar_1_dims), ] self.assertEqual(expected, self.m_add_aux_coord.call_args_list) @@ -4783,9 +4597,7 @@ def _check(self): } ) ] - self.assertEqual( - expected, self.prepared_factory_container.call_args_list - ) + self.assertEqual(expected, self.prepared_factory_container.call_args_list) self.assertEqual(3, self.m_coord.call_count) expected = [ 
mock.call(self.prepared_factory_metadata_a.combined), diff --git a/lib/iris/tests/unit/concatenate/__init__.py b/lib/iris/tests/unit/concatenate/__init__.py index 6deaf26aa0..a99b5790d7 100644 --- a/lib/iris/tests/unit/concatenate/__init__.py +++ b/lib/iris/tests/unit/concatenate/__init__.py @@ -91,8 +91,7 @@ def create_metadata( else: start, stop, step = 0, N_POINTS, 1 points = ( - array_lib.arange(start, stop, step, dtype=coord_dtype) - * SCALE_FACTOR + array_lib.arange(start, stop, step, dtype=coord_dtype) * SCALE_FACTOR ) if with_bounds: diff --git a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py index 35c3cfd17b..db8d6bc3f1 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py +++ b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py @@ -9,12 +9,7 @@ import numpy as np import pytest -from iris._concatenate import ( - _CONSTANT, - _DECREASING, - _INCREASING, - _CoordMetaData, -) +from iris._concatenate import _CONSTANT, _DECREASING, _INCREASING, _CoordMetaData from . 
import ExpectedItem, create_metadata @@ -79,9 +74,7 @@ def test_dim__scalar( @pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) @pytest.mark.parametrize("lazy", [False, True]) @pytest.mark.parametrize("with_bounds", [False, True]) -def test_aux( - order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool -) -> None: +def test_aux(order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> None: """Test :class:`iris._concatenate._CoordMetaData` with aux coord.""" metadata = create_metadata( dim_coord=False, @@ -99,9 +92,7 @@ def test_aux( @pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) @pytest.mark.parametrize("lazy", [False, True]) @pytest.mark.parametrize("with_bounds", [False, True]) -def test_aux__scalar( - coord_dtype: np.dtype, lazy: bool, with_bounds: bool -) -> None: +def test_aux__scalar(coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> None: """Test :class:`iris._concatenate._CoordMetaData` with scalar aux coord.""" metadata = create_metadata( dim_coord=False, diff --git a/lib/iris/tests/unit/concatenate/test__CoordSignature.py b/lib/iris/tests/unit/concatenate/test__CoordSignature.py index c5e4850170..0d91b1883b 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CoordSignature.py @@ -44,9 +44,7 @@ class MockCubeSignature: @pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) @pytest.mark.parametrize("lazy", [False, True]) @pytest.mark.parametrize("with_bounds", [False, True]) -def test_dim( - order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool -) -> None: +def test_dim(order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> None: """Test extent calculation of vector dimension coordinates.""" metadata = create_metadata( dim_coord=True, @@ -88,9 +86,7 @@ def test_dim( @pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) @pytest.mark.parametrize("lazy", [False, True]) 
@pytest.mark.parametrize("with_bounds", [False, True]) -def test_dim__scalar( - coord_dtype: np.dtype, lazy: bool, with_bounds: bool -) -> None: +def test_dim__scalar(coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> None: """Test extent calculation of scalar dimension coordinates.""" metadata = create_metadata( dim_coord=True, diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index 64a25a2fad..a148b6fdbd 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -24,9 +24,7 @@ def setUp(self): cube = Cube(data, standard_name="air_temperature", units="K") # Temporal coordinate. t_units = Unit("hours since 1970-01-01 00:00:00", calendar="standard") - t_coord = DimCoord( - points=np.arange(nt), standard_name="time", units=t_units - ) + t_coord = DimCoord(points=np.arange(nt), standard_name="time", units=t_units) cube.add_dim_coord(t_coord, 0) # Increasing 1D time-series cube. @@ -44,9 +42,7 @@ def setUp(self): # Scalar 0D time-series cube with scalar time coordinate. 
cube = Cube(0, standard_name="air_temperature", units="K") - cube.add_aux_coord( - DimCoord(points=nt, standard_name="time", units=t_units) - ) + cube.add_aux_coord(DimCoord(points=nt, standard_name="time", units=t_units)) self.scalar_cube = cube def test_scalar_non_common_axis(self): diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 96932e11d4..34db2b02f1 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -57,9 +57,7 @@ def setUp(self): data = np.arange(24, dtype=np.float32).reshape(2, 3, 4) cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") # Time coord - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="standard" - ) + t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00", calendar="standard") t_coord = iris.coords.DimCoord( points=np.arange(2, dtype=np.float32), standard_name="time", @@ -97,9 +95,7 @@ def setUp(self): ) # Ancillary Variables cube.add_ancillary_variable( - iris.coords.AncillaryVariable( - [0, 1, 2], long_name="baz", units="1" - ), + iris.coords.AncillaryVariable([0, 1, 2], long_name="baz", units="1"), data_dims=(1,), ) # Derived Coords @@ -371,13 +367,9 @@ def build_lazy_cube(self, points, bounds=None, nx=4, aux_coords=False): cube.add_dim_coord(lat, 0) cube.add_dim_coord(lon, 1) if aux_coords: - bounds = np.arange(len(points) * nx * 4).reshape( - len(points), nx, 4 - ) + bounds = np.arange(len(points) * nx * 4).reshape(len(points), nx, 4) bounds = as_lazy_data(bounds) - aux_coord = iris.coords.AuxCoord( - data, var_name="aux_coord", bounds=bounds - ) + aux_coord = iris.coords.AuxCoord(data, var_name="aux_coord", bounds=bounds) cube.add_aux_coord(aux_coord, (0, 1)) return cube diff --git a/lib/iris/tests/unit/constraints/test_NameConstraint.py b/lib/iris/tests/unit/constraints/test_NameConstraint.py index b959b82434..ff15f8fa27 100644 --- 
a/lib/iris/tests/unit/constraints/test_NameConstraint.py +++ b/lib/iris/tests/unit/constraints/test_NameConstraint.py @@ -109,9 +109,7 @@ def test_long_name(self): constraint = NameConstraint(long_name=None) self.assertFalse(constraint._cube_func(self.cube)) # No match. - constraint = NameConstraint( - standard_name=None, long_name=self.long_name - ) + constraint = NameConstraint(standard_name=None, long_name=self.long_name) self.assertFalse(constraint._cube_func(self.cube)) def test_var_name(self): diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py index 2291e677bc..e8b7d817e1 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py @@ -23,9 +23,7 @@ class Test_add_categorised_coord(tests.IrisTest): def setUp(self): # Factor out common variables and objects. self.cube = mock.Mock(name="cube", coords=mock.Mock(return_value=[])) - self.coord = mock.Mock( - name="coord", points=np.arange(12).reshape(3, 4) - ) + self.coord = mock.Mock(name="coord", points=np.arange(12).reshape(3, 4)) self.units = "units" self.vectorised = mock.Mock(name="vectorized_result") @@ -78,9 +76,9 @@ def fn(coord, v): self.assertEqual( aux_coord_constructor.call_args[0][0], - vectorise_patch(fn, otypes=[object])( - self.coord, self.coord.points - ).astype("|S64"), + vectorise_patch(fn, otypes=[object])(self.coord, self.coord.points).astype( + "|S64" + ), ) @@ -89,9 +87,7 @@ def setUp(self): self.expected = { "standard": np.array(list(range(360, 367)) + list(range(1, 4))), "gregorian": np.array(list(range(360, 367)) + list(range(1, 4))), - "proleptic_gregorian": np.array( - list(range(360, 367)) + list(range(1, 4)) - ), + "proleptic_gregorian": np.array(list(range(360, 367)) + list(range(1, 4))), "noleap": np.array(list(range(359, 366)) + list(range(1, 4))), "julian": 
np.array(list(range(360, 367)) + list(range(1, 4))), "all_leap": np.array(list(range(360, 367)) + list(range(1, 4))), @@ -123,9 +119,7 @@ def test_calendars(self): points = cube.coord("day_of_year").points expected_points = self.expected[calendar] msg = "Test failed for the following calendar: {}." - self.assertArrayEqual( - points, expected_points, err_msg=msg.format(calendar) - ) + self.assertArrayEqual(points, expected_points, err_msg=msg.format(calendar)) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index caf52e9c84..7b65738b15 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -24,9 +24,7 @@ def setUp(self): # use hour numbers as data values also (don't actually use this for # anything) - cube = iris.cube.Cube( - hour_numbers, long_name="test cube", units="metres" - ) + cube = iris.cube.Cube(hour_numbers, long_name="test cube", units="metres") time_coord = iris.coords.DimCoord( hour_numbers, diff --git a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py index 6560f65a32..90aef6e59a 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py +++ b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py @@ -150,12 +150,8 @@ def test_add_season_nonstandard(cube, time_coord): # season categorisations work for non-standard seasons? 
seasons = ["djfm", "amjj", "ason"] ccat.add_season(cube, time_coord, name="seasons", seasons=seasons) - ccat.add_season_number( - cube, time_coord, name="season_numbers", seasons=seasons - ) - ccat.add_season_year( - cube, time_coord, name="season_years", seasons=seasons - ) + ccat.add_season_number(cube, time_coord, name="season_numbers", seasons=seasons) + ccat.add_season_year(cube, time_coord, name="season_years", seasons=seasons) IrisTest.assertCML(IrisTest(), cube, ("categorisation", "customcheck.cml")) @@ -219,9 +215,7 @@ def test_add_season_membership(cube): coord_membership = cube.coord("in_season") season_locations = np.where(coord_season.points == season)[0] membership_locations = np.where(coord_membership.points)[0] - np.testing.assert_array_almost_equal( - membership_locations, season_locations - ) + np.testing.assert_array_almost_equal(membership_locations, season_locations) def test_add_season_invalid_spec(cube, season_cat_func): diff --git a/lib/iris/tests/unit/coord_systems/test_Geostationary.py b/lib/iris/tests/unit/coord_systems/test_Geostationary.py index f144dca190..0b47671d0a 100644 --- a/lib/iris/tests/unit/coord_systems/test_Geostationary.py +++ b/lib/iris/tests/unit/coord_systems/test_Geostationary.py @@ -86,9 +86,7 @@ def test_invalid_sweep(self): def test_set_optional_args(self): # Check that setting the optional (non-ellipse) args works. - crs = Geostationary( - 0, 0, 1000, "y", false_easting=100, false_northing=-200 - ) + crs = Geostationary(0, 0, 1000, "y", false_easting=100, false_northing=-200) self.assertEqualAndKind(crs.false_easting, 100.0) self.assertEqualAndKind(crs.false_northing, -200.0) @@ -105,9 +103,7 @@ def test_no_optional_args(self): def test_optional_args_None(self): # Check expected defaults with optional args=None. 
- crs = Geostationary( - 0, 0, 1000, "y", false_easting=None, false_northing=None - ) + crs = Geostationary(0, 0, 1000, "y", false_easting=None, false_northing=None) self._check_crs_defaults(crs) diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py index dd2f42bb2f..177aa3a953 100644 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ b/lib/iris/tests/unit/coord_systems/test_Mercator.py @@ -99,9 +99,7 @@ def test_extra_kwargs(self): true_scale_lat = 14.0 false_easting = 13 false_northing = 12 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) merc_cs = Mercator( longitude_of_projection_origin, @@ -130,9 +128,7 @@ def test_extra_kwargs_scale_factor_alternative(self): # Check that a projection with non-default values is correctly # converted to a cartopy CRS. scale_factor_at_projection_origin = 1.3 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) merc_cs = Mercator( ellipsoid=ellipsoid, @@ -166,9 +162,7 @@ def test_extra_kwargs(self): true_scale_lat = 14.0 false_easting = 13 false_northing = 12 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) merc_cs = Mercator( longitude_of_projection_origin, @@ -194,9 +188,7 @@ def test_extra_kwargs(self): self.assertEqual(res, expected) def test_extra_kwargs_scale_factor_alternative(self): - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) scale_factor_at_projection_origin = 1.3 merc_cs = Mercator( diff --git a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py 
b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py index b17c1cc788..d80b4311c2 100644 --- a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py @@ -70,9 +70,7 @@ class ParamTuple(NamedTuple): ParamTuple( "globe", dict(ellipsoid=GeogCS(1)), - dict( - globe=GlobeWithEq(semimajor_axis=1, semiminor_axis=1, ellipse=None) - ), + dict(globe=GlobeWithEq(semimajor_axis=1, semiminor_axis=1, ellipse=None)), ), ParamTuple( "combo", @@ -92,9 +90,7 @@ class ParamTuple(NamedTuple): false_easting=1000000, false_northing=-2000000, scale_factor=0.939692620786, - globe=GlobeWithEq( - semimajor_axis=1, semiminor_axis=1, ellipse=None - ), + globe=GlobeWithEq(semimajor_axis=1, semiminor_axis=1, ellipse=None), ), ), ] @@ -118,15 +114,11 @@ class TestArgs: globe=None, ) - @pytest.fixture( - autouse=True, params=kwarg_permutations, ids=permutation_ids - ) + @pytest.fixture(autouse=True, params=kwarg_permutations, ids=permutation_ids) def make_variant_inputs(self, request) -> None: """Parse a ParamTuple into usable test information.""" inputs: ParamTuple = request.param - self.class_kwargs = dict( - self.class_kwargs_default, **inputs.class_kwargs - ) + self.class_kwargs = dict(self.class_kwargs_default, **inputs.class_kwargs) self.cartopy_kwargs_expected = dict( self.cartopy_kwargs_default, **inputs.cartopy_kwargs ) diff --git a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py index 16f3ef2e7d..0c2e56b71c 100755 --- a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py +++ b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py @@ -167,9 +167,7 @@ def test_extra_kwargs_scale_factor(self): scale_factor_at_projection_origin = 1.3 false_easting = 13 false_northing = 15 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, 
semi_minor_axis=6356256.909) polar_cs = PolarStereographic( central_lat=central_lat, @@ -204,9 +202,7 @@ def test_extra_kwargs_true_scale_lat_alternative(self): true_scale_lat = 80 false_easting = 13 false_northing = 15 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) + ellipsoid = GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909) polar_cs = PolarStereographic( central_lat=central_lat, @@ -239,9 +235,7 @@ def setUp(self): self.as_cartopy_method = PolarStereographic.as_cartopy_crs -class Test_PolarStereographic__as_cartopy_projection( - tests.IrisTest, AsCartopyMixin -): +class Test_PolarStereographic__as_cartopy_projection(tests.IrisTest, AsCartopyMixin): def setUp(self): self.as_cartopy_method = PolarStereographic.as_cartopy_projection diff --git a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py index 4cd5f215a9..4e5ed67bf9 100644 --- a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py +++ b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py @@ -59,9 +59,7 @@ def test_projection_creation(self): def test_set_optional_args(self): # Check that setting the optional (non-ellipse) args works. - crs = VerticalPerspective( - 0, 0, 1000, false_easting=100, false_northing=-203.7 - ) + crs = VerticalPerspective(0, 0, 1000, false_easting=100, false_northing=-203.7) self.assertEqualAndKind(crs.false_easting, 100.0) self.assertEqualAndKind(crs.false_northing, -203.7) @@ -78,9 +76,7 @@ def test_no_optional_args(self): def test_optional_args_None(self): # Check expected defaults with optional args=None. 
- crs = VerticalPerspective( - 0, 0, 1000, false_easting=None, false_northing=None - ) + crs = VerticalPerspective(0, 0, 1000, false_easting=None, false_northing=None) self._check_crs_defaults(crs) diff --git a/lib/iris/tests/unit/coords/__init__.py b/lib/iris/tests/unit/coords/__init__.py index a99795d4da..2143868847 100644 --- a/lib/iris/tests/unit/coords/__init__.py +++ b/lib/iris/tests/unit/coords/__init__.py @@ -36,26 +36,18 @@ def setup_test_arrays(self, shape, masked=False): if masked: mpoints = ma.array(points) self.no_masked_pts_real = mpoints - self.no_masked_pts_lazy = da.from_array( - mpoints, mpoints.shape, asarray=False - ) + self.no_masked_pts_lazy = da.from_array(mpoints, mpoints.shape, asarray=False) mpoints = ma.array(mpoints, copy=True) mpoints[0] = ma.masked self.masked_pts_real = mpoints - self.masked_pts_lazy = da.from_array( - mpoints, mpoints.shape, asarray=False - ) + self.masked_pts_lazy = da.from_array(mpoints, mpoints.shape, asarray=False) mbounds = ma.array(bounds) self.no_masked_bds_real = mbounds - self.no_masked_bds_lazy = da.from_array( - mbounds, mbounds.shape, asarray=False - ) + self.no_masked_bds_lazy = da.from_array(mbounds, mbounds.shape, asarray=False) mbounds = ma.array(mbounds, copy=True) mbounds[0] = ma.masked self.masked_bds_real = mbounds - self.masked_bds_lazy = da.from_array( - mbounds, mbounds.shape, asarray=False - ) + self.masked_bds_lazy = da.from_array(mbounds, mbounds.shape, asarray=False) def is_real_data(array): diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index 0177bcafc4..c0aba2b4a1 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -56,9 +56,7 @@ def setupTestArrays(self, shape=(2, 3), masked=False): mvalues = ma.array(mvalues, copy=True) mvalues[0] = ma.masked self.masked_data_real = mvalues - self.masked_data_lazy = da.from_array( - mvalues, mvalues.shape, 
asarray=False - ) + self.masked_data_lazy = da.from_array(mvalues, mvalues.shape, asarray=False) class Test__init__(tests.IrisTest, AncillaryVariableTestMixin): @@ -78,14 +76,12 @@ def test_lazyness_and_dtype_combinations(self): self.assertArraysShareData( data, self.data_real, - "Data values are not the same " - "data as the provided array.", + "Data values are not the same data as the provided array.", ) self.assertIsNot( data, self.data_real, - "Data array is the same instance as the provided " - "array.", + "Data array is the same instance as the provided array.", ) else: # the original data values were cast to a test dtype. @@ -224,9 +220,7 @@ def test_dtypes(self): # floating dtype. # Check that dtypes remain the same in all cases, taking the dtypes # directly from the core data as we have no masking). - for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses( - self - ): + for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(self): sub_ancill_var = main_ancill_var[:2, 1] ancill_var_dtype = main_ancill_var.dtype @@ -239,18 +233,14 @@ def test_dtypes(self): self.assertEqual( sub_data.dtype, ancill_var_dtype, - msg.format( - ancill_var_dtype, data_lazyness, "data", sub_data.dtype - ), + msg.format(ancill_var_dtype, data_lazyness, "data", sub_data.dtype), ) def test_lazyness(self): # Index ancillary variables with real+lazy data, and either an int or # floating dtype. # Check that lazy data stays lazy and real stays real, in all cases. - for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses( - self - ): + for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(self): sub_ancill_var = main_ancill_var[:2, 1] msg = ( @@ -274,9 +264,7 @@ def test_lazyness(self): def test_real_data_copies(self): # Index ancillary variables with real+lazy data. # In all cases, check that any real arrays are copied by the indexing. 
- for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses( - self - ): + for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(self): sub_ancill_var = main_ancill_var[:2, 1] msg = ( @@ -304,9 +292,7 @@ def test_lazyness(self): # Copy ancillary variables with real+lazy data, and either an int or # floating dtype. # Check that lazy data stays lazy and real stays real, in all cases. - for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses( - self - ): + for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(self): ancill_var_dtype = main_ancill_var.dtype copied_ancill_var = main_ancill_var.copy() @@ -315,9 +301,7 @@ def test_lazyness(self): "changed lazyness of {} from {!r} to {!r}." ) - copied_data_lazyness = lazyness_string( - copied_ancill_var.core_data() - ) + copied_data_lazyness = lazyness_string(copied_ancill_var.core_data()) self.assertEqual( copied_data_lazyness, data_lazyness, @@ -333,9 +317,7 @@ def test_lazyness(self): def test_realdata_copies(self): # Copy ancillary variables with real+lazy data. # In all cases, check that any real arrays are copies, not views. 
- for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses( - self - ): + for main_ancill_var, data_lazyness in data_all_dtypes_and_lazynesses(self): copied_ancill_var = main_ancill_var.copy() msg = ( @@ -619,55 +601,41 @@ def setUp(self): def test_not_compatible_diff_name(self): # Different name() - not compatible self.modified_ancill_var.rename("air_temperature") - self.assertFalse( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) + self.assertFalse(self.ancill_var.is_compatible(self.modified_ancill_var)) def test_not_compatible_diff_units(self): # Different units- not compatible self.modified_ancill_var.units = "m" - self.assertFalse( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) + self.assertFalse(self.ancill_var.is_compatible(self.modified_ancill_var)) def test_not_compatible_diff_common_attrs(self): # Different common attributes - not compatible. self.ancill_var.attributes["source"] = "A" self.modified_ancill_var.attributes["source"] = "B" - self.assertFalse( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) + self.assertFalse(self.ancill_var.is_compatible(self.modified_ancill_var)) def test_compatible_diff_data(self): # Different data values - compatible. self.modified_ancill_var.data = [10.0, 20.0, 100.0] - self.assertTrue( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) + self.assertTrue(self.ancill_var.is_compatible(self.modified_ancill_var)) def test_compatible_diff_var_name(self): # Different var_name (but same name()) - compatible. self.modified_ancill_var.var_name = "obs_num" - self.assertTrue( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) + self.assertTrue(self.ancill_var.is_compatible(self.modified_ancill_var)) def test_compatible_diff_non_common_attributes(self): # Different non-common attributes - compatible. 
self.ancill_var.attributes["source"] = "A" self.modified_ancill_var.attributes["origin"] = "B" - self.assertTrue( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) + self.assertTrue(self.ancill_var.is_compatible(self.modified_ancill_var)) def test_compatible_ignore_common_attribute(self): # ignore different common attributes - compatible. self.ancill_var.attributes["source"] = "A" self.modified_ancill_var.attributes["source"] = "B" self.assertTrue( - self.ancill_var.is_compatible( - self.modified_ancill_var, ignore="source" - ) + self.ancill_var.is_compatible(self.modified_ancill_var, ignore="source") ) diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py index 31bd54eb12..abdf4867f7 100644 --- a/lib/iris/tests/unit/coords/test_AuxCoord.py +++ b/lib/iris/tests/unit/coords/test_AuxCoord.py @@ -59,8 +59,7 @@ def test_lazyness_and_dtype_combinations(self): self.assertIsNot( pts, self.pts_real, - "Points array is the same instance as the provided " - "array.", + "Points array is the same instance as the provided array.", ) else: # the original points were cast to a test dtype. @@ -86,8 +85,7 @@ def test_lazyness_and_dtype_combinations(self): self.assertIsNot( pts, self.pts_real, - "Bounds array is the same instance as the provided " - "array.", + "Bounds array is the same instance as the provided array.", ) else: # the original bounds were cast to a test dtype. 
@@ -470,9 +468,7 @@ def test_real_data_copies(self): main_points = main_coord.core_points() sub_points = sub_coord.core_points() sub_main_points = main_points[:2, 1] - self.assertEqualRealArraysAndDtypes( - sub_points, sub_main_points - ) + self.assertEqualRealArraysAndDtypes(sub_points, sub_main_points) self.assertArraysDoNotShareData( sub_points, sub_main_points, @@ -483,9 +479,7 @@ def test_real_data_copies(self): main_bounds = main_coord.core_bounds() sub_bounds = sub_coord.core_bounds() sub_main_bounds = main_bounds[:2, 1] - self.assertEqualRealArraysAndDtypes( - sub_bounds, sub_main_bounds - ) + self.assertEqualRealArraysAndDtypes(sub_bounds, sub_main_bounds) self.assertArraysDoNotShareData( sub_bounds, sub_main_bounds, diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index b34ffdfb91..2b9f808404 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -53,9 +53,7 @@ def test_cftime_calender_bounded_cell(self): datetime.datetime(2011, 1, 1), ], ) - self.assert_raises_on_comparison( - cell, dt, TypeError, "different calendars" - ) + self.assert_raises_on_comparison(cell, dt, TypeError, "different calendars") def test_PartialDateTime_unbounded_cell(self): # Check that cell comparison works with PartialDateTimes. 
@@ -254,9 +252,7 @@ def test_cell_lhs(self): bool(cell == n) bool(cell != n) except: # noqa - self.fail( - "Result of comparison could not be used as a truth value" - ) + self.fail("Result of comparison could not be used as a truth value") def test_cell_rhs(self): cell = Cell(point=1.5) @@ -270,9 +266,7 @@ def test_cell_rhs(self): bool(n == cell) bool(n != cell) except: # noqa - self.fail( - "Result of comparison could not be used as a truth value" - ) + self.fail("Result of comparison could not be used as a truth value") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index c667e012ef..c8c1e72041 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -110,10 +110,7 @@ def test___str__(self): self.assertEqual(self.measure.__str__(), expected) def test___repr__(self): - expected = ( - "" - ) + expected = "" self.assertEqual(expected, self.measure.__repr__()) def test__eq__(self): diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 14dcdf7ca0..5745c870ce 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -30,9 +30,7 @@ def setUp(self): points = [0.0, 90.0, 180.0, 270.0] self.coord = DimCoord(points, circular=False, units="degrees") - def _test_nearest_neighbour_index( - self, target, bounds=None, circular=False - ): + def _test_nearest_neighbour_index(self, target, bounds=None, circular=False): _bounds = [[-20, 10], [10, 100], [100, 260], [260, 340]] ext_pnts = [-70, -10, 110, 275, 370] if bounds is True: @@ -85,9 +83,7 @@ def setUp(self): points = [270.0, 180.0, 90.0, 0.0] self.coord = DimCoord(points, circular=False, units="degrees") - def _test_nearest_neighbour_index( - self, target, bounds=False, circular=False - ): + def _test_nearest_neighbour_index(self, target, bounds=False, circular=False): _bounds = [[340, 
260], [260, 100], [100, 10], [10, -20]] ext_pnts = [-70, -10, 110, 275, 370] if bounds: @@ -209,9 +205,7 @@ def test_points_inside_bounds_outside(self): self.assertArrayEqual(lat.bounds, [[-90, -40], [-40, 35], [35, 90]]) def test_points_inside_bounds_outside_grid_latitude(self): - lat = DimCoord( - [-80, 0, 70], units="degree", standard_name="grid_latitude" - ) + lat = DimCoord([-80, 0, 70], units="degree", standard_name="grid_latitude") lat.guess_bounds() self.assertArrayEqual(lat.bounds, [[-90, -40], [-40, 35], [35, 90]]) @@ -221,9 +215,7 @@ def test_points_to_edges_bounds_outside(self): self.assertArrayEqual(lat.bounds, [[-90, -45], [-45, 45], [45, 90]]) def test_points_outside(self): - lat = DimCoord( - [-100, 0, 120], units="degree", standard_name="latitude" - ) + lat = DimCoord([-100, 0, 120], units="degree", standard_name="latitude") lat.guess_bounds() self.assertArrayEqual(lat.bounds, [[-150, -50], [-50, 60], [60, 180]]) @@ -238,9 +230,7 @@ def test_points_inside_bounds_outside_wrong_name(self): self.assertArrayEqual(lat.bounds, [[-120, -40], [-40, 35], [35, 105]]) def test_points_inside_bounds_outside_wrong_name_2(self): - lat = DimCoord( - [-80, 0, 70], units="degree", long_name="other_latitude" - ) + lat = DimCoord([-80, 0, 70], units="degree", long_name="other_latitude") lat.guess_bounds() self.assertArrayEqual(lat.bounds, [[-120, -40], [-40, 35], [35, 105]]) @@ -313,9 +303,7 @@ def _serialize(data): for points, bounds in [string, string_nobounds, string_multi]: coord = AuxCoord(points=points, bounds=bounds, units=units) collapsed_coord = coord.collapsed() - self.assertArrayEqual( - collapsed_coord.points, _serialize(points) - ) + self.assertArrayEqual(collapsed_coord.points, _serialize(points)) if bounds is not None: for index in np.ndindex(bounds.shape[1:]): index_slice = (slice(None),) + tuple(index) @@ -334,9 +322,7 @@ def test_dim_1d(self): coord.units = units with self.assertNoWarningsRegexp(): collapsed_coord = coord.collapsed() - 
self.assertArrayEqual( - collapsed_coord.points, np.mean(coord.points) - ) + self.assertArrayEqual(collapsed_coord.points, np.mean(coord.points)) self.assertArrayEqual( collapsed_coord.bounds, [[coord.bounds.min(), coord.bounds.max()]], @@ -350,9 +336,7 @@ def test_lazy_points(self): self.assertTrue(collapsed_coord.has_lazy_points()) def test_numeric_nd(self): - coord = AuxCoord( - points=np.array([[1, 2, 4, 5], [4, 5, 7, 8], [7, 8, 10, 11]]) - ) + coord = AuxCoord(points=np.array([[1, 2, 4, 5], [4, 5, 7, 8], [7, 8, 10, 11]])) collapsed_coord = coord.collapsed() self.assertArrayEqual(collapsed_coord.points, np.array([6])) @@ -360,9 +344,7 @@ def test_numeric_nd(self): # Test partially collapsing one dimension... collapsed_coord = coord.collapsed(1) - self.assertArrayEqual( - collapsed_coord.points, np.array([3.0, 6.0, 9.0]) - ) + self.assertArrayEqual(collapsed_coord.points, np.array([3.0, 6.0, 9.0])) self.assertArrayEqual( collapsed_coord.bounds, np.array([[1, 5], [4, 8], [7, 11]]) ) @@ -397,9 +379,7 @@ def test_numeric_nd_bounds_first(self): coord = AuxCoord(self.pts_real, bounds=self.bds_real) # ... and the other.. 
collapsed_coord = coord.collapsed(0) - self.assertArrayEqual( - collapsed_coord.points, np.array([40, 50, 60, 70]) - ) + self.assertArrayEqual(collapsed_coord.points, np.array([40, 50, 60, 70])) self.assertArrayEqual( collapsed_coord.bounds, np.array([[-2, 82], [8, 92], [18, 102], [28, 112]]), @@ -444,9 +424,7 @@ def test_lazy_nd_bounds_first(self): coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) collapsed_coord = coord.collapsed(0) - self.assertArrayEqual( - collapsed_coord.points, np.array([40, 50, 60, 70]) - ) + self.assertArrayEqual(collapsed_coord.points, np.array([40, 50, 60, 70])) self.assertArrayEqual( collapsed_coord.bounds, np.array([[-2, 82], [8, 92], [18, 102], [28, 112]]), @@ -537,9 +515,7 @@ def test_numeric_3_bounds(self): self.assertFalse(collapsed_coord.has_lazy_bounds()) self.assertArrayAlmostEqual(collapsed_coord.points, np.array([4.0])) - self.assertArrayAlmostEqual( - collapsed_coord.bounds, np.array([[2.0, 6.0]]) - ) + self.assertArrayAlmostEqual(collapsed_coord.bounds, np.array([[2.0, 6.0]])) def test_lazy_3_bounds(self): points = da.arange(3) * 2.0 @@ -560,9 +536,7 @@ def test_lazy_3_bounds(self): self.assertTrue(collapsed_coord.has_lazy_bounds()) self.assertArrayAlmostEqual(collapsed_coord.points, da.array([2.0])) - self.assertArrayAlmostEqual( - collapsed_coord.bounds, da.array([[0.0, 4.0]]) - ) + self.assertArrayAlmostEqual(collapsed_coord.bounds, da.array([[0.0, 4.0]])) class Test_is_compatible(tests.IrisTest): @@ -591,10 +565,7 @@ def test_different_array_attrs_incompatible(self): class Test_contiguous_bounds(tests.IrisTest): def test_1d_coord_no_bounds_warning(self): coord = DimCoord([0, 1, 2], standard_name="latitude") - msg = ( - "Coordinate 'latitude' is not bounded, guessing contiguous " - "bounds." - ) + msg = "Coordinate 'latitude' is not bounded, guessing contiguous bounds." 
with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") @@ -609,9 +580,7 @@ def test_2d_coord_no_bounds_error(self): def test__sanity_check_bounds_call(self): coord = DimCoord([5, 15, 25], bounds=[[0, 10], [10, 20], [20, 30]]) - with mock.patch( - "iris.coords.Coord._sanity_check_bounds" - ) as bounds_check: + with mock.patch("iris.coords.Coord._sanity_check_bounds") as bounds_check: coord.contiguous_bounds() bounds_check.assert_called_once() @@ -697,9 +666,7 @@ def setUp(self): ) def test_1d_contiguous(self): - coord = DimCoord( - [-20, 0, 20], bounds=[[-30, -10], [-10, 10], [10, 30]] - ) + coord = DimCoord([-20, 0, 20], bounds=[[-30, -10], [-10, 10], [10, 30]]) contiguous, diffs = coord._discontiguity_in_bounds() self.assertTrue(contiguous) self.assertArrayEqual(diffs, np.zeros(2)) @@ -732,9 +699,7 @@ def test_2d_discontiguous_along_x(self): contiguous, diffs = coord._discontiguity_in_bounds() diffs_along_x, diffs_along_y = diffs self.assertFalse(contiguous) - self.assertArrayEqual( - diffs_along_x, np.array([True, True, True]).reshape(3, 1) - ) + self.assertArrayEqual(diffs_along_x, np.array([True, True, True]).reshape(3, 1)) self.assertTrue(not diffs_along_y.any()) def test_2d_discontiguous_along_y(self): @@ -790,9 +755,7 @@ def test_2d_one_cell(self): def test_2d_one_cell_along_x(self): # Test a 2D coord with a single cell along the x axis, where the coord # has shape (2, 1). - coord = AuxCoord( - self.points_3by3[:, :1], bounds=self.lat_bounds_3by3[:, :1, :] - ) + coord = AuxCoord(self.points_3by3[:, :1], bounds=self.lat_bounds_3by3[:, :1, :]) contiguous, diffs = coord._discontiguity_in_bounds() diffs_along_x, diffs_along_y = diffs self.assertTrue(contiguous) @@ -802,9 +765,7 @@ def test_2d_one_cell_along_x(self): def test_2d_one_cell_along_y(self): # Test a 2D coord with a single cell along the y axis, where the coord # has shape (1, 2). 
- coord = AuxCoord( - self.points_3by3[:1, :], bounds=self.lon_bounds_3by3[:1, :, :] - ) + coord = AuxCoord(self.points_3by3[:1, :], bounds=self.lon_bounds_3by3[:1, :, :]) contiguous, diffs = coord._discontiguity_in_bounds() diffs_along_x, diffs_along_y = diffs self.assertTrue(contiguous) @@ -898,10 +859,7 @@ def test_coord_1d_2_bounds(self): def test_coord_1d_no_bounds(self): coord = iris.coords.DimCoord([0, 1], standard_name="latitude") - emsg = ( - "Contiguous bounds are only defined for 1D coordinates with " - "2 bounds." - ) + emsg = "Contiguous bounds are only defined for 1D coordinates with 2 bounds." with self.assertRaisesRegex(ValueError, emsg): coord._sanity_check_bounds() @@ -909,10 +867,7 @@ def test_coord_1d_1_bounds(self): coord = iris.coords.DimCoord( [0, 1], standard_name="latitude", bounds=np.array([[0], [1]]) ) - emsg = ( - "Contiguous bounds are only defined for 1D coordinates with " - "2 bounds." - ) + emsg = "Contiguous bounds are only defined for 1D coordinates with 2 bounds." with self.assertRaisesRegex(ValueError, emsg): coord._sanity_check_bounds() @@ -927,13 +882,8 @@ def test_coord_2d_4_bounds(self): coord._sanity_check_bounds() def test_coord_2d_no_bounds(self): - coord = iris.coords.AuxCoord( - [[0, 0], [1, 1]], standard_name="latitude" - ) - emsg = ( - "Contiguous bounds are only defined for 2D coordinates with " - "4 bounds." - ) + coord = iris.coords.AuxCoord([[0, 0], [1, 1]], standard_name="latitude") + emsg = "Contiguous bounds are only defined for 2D coordinates with 4 bounds." with self.assertRaisesRegex(ValueError, emsg): coord._sanity_check_bounds() @@ -943,17 +893,12 @@ def test_coord_2d_2_bounds(self): standard_name="latitude", bounds=np.array([[[0, 1], [0, 1]], [[1, 2], [1, 2]]]), ) - emsg = ( - "Contiguous bounds are only defined for 2D coordinates with " - "4 bounds." - ) + emsg = "Contiguous bounds are only defined for 2D coordinates with 4 bounds." 
with self.assertRaisesRegex(ValueError, emsg): coord._sanity_check_bounds() def test_coord_3d(self): - coord = iris.coords.AuxCoord( - np.zeros((2, 2, 2)), standard_name="height" - ) + coord = iris.coords.AuxCoord(np.zeros((2, 2, 2)), standard_name="height") emsg = ( "Contiguous bounds are not defined for coordinates with more " "than 2 dimensions." @@ -975,9 +920,7 @@ def test_convert_unknown_units(self): class Test___str__(tests.IrisTest): def test_short_time_interval(self): - coord = DimCoord( - [5], standard_name="time", units="days since 1970-01-01" - ) + coord = DimCoord([5], standard_name="time", units="days since 1970-01-01") expected = "\n".join( [ "DimCoord : time / (days since 1970-01-01, standard calendar)", @@ -991,9 +934,7 @@ def test_short_time_interval(self): self.assertEqual(expected, result) def test_short_time_interval__bounded(self): - coord = DimCoord( - [5, 6], standard_name="time", units="days since 1970-01-01" - ) + coord = DimCoord([5, 6], standard_name="time", units="days since 1970-01-01") coord.guess_bounds() expected = "\n".join( [ @@ -1011,9 +952,7 @@ def test_short_time_interval__bounded(self): self.assertEqual(expected, result) def test_long_time_interval(self): - coord = DimCoord( - [5], standard_name="time", units="years since 1970-01-01" - ) + coord = DimCoord([5], standard_name="time", units="years since 1970-01-01") expected = "\n".join( [ "DimCoord : time / (years since 1970-01-01, standard calendar)", @@ -1027,9 +966,7 @@ def test_long_time_interval(self): self.assertEqual(expected, result) def test_long_time_interval__bounded(self): - coord = DimCoord( - [5, 6], standard_name="time", units="years since 1970-01-01" - ) + coord = DimCoord([5, 6], standard_name="time", units="years since 1970-01-01") coord.guess_bounds() expected = "\n".join( [ @@ -1085,9 +1022,7 @@ def test_create_no_bounds_no_set(self): def test_create_no_time_no_set(self): emsg = "Cannot set climatological .* valid time reference units.*" with 
self.assertRaisesRegex(TypeError, emsg): - AuxCoord( - points=[0, 1], bounds=[[0, 1], [1, 2]], climatological=True - ) + AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]], climatological=True) def test_absent(self): coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]]) @@ -1185,10 +1120,7 @@ def test_copy_coord(self, ignore_axis, copy_or_from, result, sample_coord): class Test___init____abstractmethod(tests.IrisTest): def test(self): - emsg = ( - "Can't instantiate abstract class Coord with abstract" - " method.* __init__" - ) + emsg = "Can't instantiate abstract class Coord with abstract method.* __init__" with self.assertRaisesRegex(TypeError, emsg): _ = Coord(points=[0, 1]) @@ -1203,9 +1135,7 @@ def test(self): result = test_coord.cube_dims(mock_cube) self.assertEqual(result, mock_dims_result) - self.assertEqual( - mock_dims_call.call_args_list, [mock.call(test_coord)] - ) + self.assertEqual(mock_dims_call.call_args_list, [mock.call(test_coord)]) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py index 2c8ab3a7ba..0b1d123563 100644 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ b/lib/iris/tests/unit/coords/test_DimCoord.py @@ -412,9 +412,7 @@ def test_real_data_copies(self): main_points = main_coord.core_points() sub_points = sub_coord.core_points() sub_main_points = main_points[:2] - self.assertEqualRealArraysAndDtypes( - sub_points, sub_main_points - ) + self.assertEqualRealArraysAndDtypes(sub_points, sub_main_points) self.assertArraysDoNotShareData( sub_points, sub_main_points, @@ -425,9 +423,7 @@ def test_real_data_copies(self): main_bounds = main_coord.core_bounds() sub_bounds = sub_coord.core_bounds() sub_main_bounds = main_bounds[:2] - self.assertEqualRealArraysAndDtypes( - sub_bounds, sub_main_bounds - ) + self.assertEqualRealArraysAndDtypes(sub_bounds, sub_main_bounds) self.assertArraysDoNotShareData( sub_bounds, sub_main_bounds, @@ -441,9 +437,7 @@ def setUp(self): 
self.setupTestArrays() def test_writable_points(self): - coord1 = DimCoord( - np.arange(5), bounds=[[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]] - ) + coord1 = DimCoord(np.arange(5), bounds=[[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]]) coord2 = coord1.copy() msg = "destination is read-only" diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 91a50a9a1c..1cdd6dfe5c 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -216,9 +216,7 @@ def test_simple(self): self.assertLines(expected, result) def test_minimal(self): - result = self.coord_representations( - long_name=None, units=None, shape=(1,) - ) + result = self.coord_representations(long_name=None, units=None, shape=(1,)) expected = [ "", "AuxCoord : unknown / (unknown)", @@ -287,19 +285,11 @@ def test_dtype_int(self): def test_dtype_date(self): # Note: test with a date 'longer' than the built-in one in # 'sample_coord(dates=True)', because it includes a time-of-day - full_date_unit = Unit( - "days since 1892-05-17 03:00:25", calendar="360_day" - ) + full_date_unit = Unit("days since 1892-05-17 03:00:25", calendar="360_day") result = self.coord_representations(units=full_date_unit) expected = [ - ( - "" - ), - ( - "AuxCoord : x / (days since 1892-05-17 03:00:25, " - "360_day calendar)" - ), + (""), + ("AuxCoord : x / (days since 1892-05-17 03:00:25, 360_day calendar)"), " points: [", " 1892-05-17 03:00:25, 1892-05-18 03:00:25,", " 1892-05-19 03:00:25, 1892-05-20 03:00:25,", @@ -403,9 +393,7 @@ def test_scalar(self): self.assertLines(expected, result) def test_scalar_masked(self): - result = self.coord_representations( - shape=(1,), bounded=True, masked=True - ) + result = self.coord_representations(shape=(1,), bounded=True, masked=True) expected = [ "", "AuxCoord : x / (m)", @@ -457,10 +445,7 @@ def test_length_long(self): # Completely truncated representations 
result = self.coord_representations(shape=(150,), bounded=True) expected = [ - ( - "" - ), + (""), "AuxCoord : x / (m)", " points: [ 0., 1., ..., 148., 149.]", " bounds: [", @@ -685,9 +670,7 @@ def test_integers_masked(self): self.assertLines(expected, result) def test_integers_masked_long(self): - result = self.coord_representations( - shape=(20,), datatype=int, masked=True - ) + result = self.coord_representations(shape=(20,), datatype=int, masked=True) expected = [ "", "AuxCoord : x / (m)", @@ -742,10 +725,7 @@ def test_climatological(self): coord = coord[:1] # Just to make it a bit shorter result = self.repr_str_strings(coord) expected = [ - ( - "" - ), + (""), ( "DimCoord : time / (days since 1970-01-01 00:00:00-00, " "standard calendar)" diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 5e513c2bd0..0829b8ccf6 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -26,13 +26,7 @@ from iris.aux_factory import HybridHeightFactory from iris.common.metadata import BaseMetadata import iris.coords -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - DimCoord, -) +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, CellMethod, DimCoord from iris.cube import Cube, CubeAttrsDict import iris.exceptions from iris.exceptions import ( @@ -44,11 +38,7 @@ UnitConversionError, ) import iris.tests.stock as stock -from iris.tests.stock.mesh import ( - sample_mesh, - sample_mesh_cube, - sample_meshcoord, -) +from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube, sample_meshcoord class Test___init___data(tests.IrisTest): @@ -83,9 +73,7 @@ def test_matrix(self): class Test_data_dtype_fillvalue(tests.IrisTest): - def _sample_data( - self, dtype=("f4"), masked=False, fill_value=None, lazy=False - ): + def _sample_data(self, dtype=("f4"), masked=False, fill_value=None, lazy=False): data = np.arange(6).reshape((2, 3)) dtype = 
np.dtype(dtype) data = data.astype(dtype) @@ -97,9 +85,7 @@ def _sample_data( data = as_lazy_data(data) return data - def _sample_cube( - self, dtype=("f4"), masked=False, fill_value=None, lazy=False - ): + def _sample_cube(self, dtype=("f4"), masked=False, fill_value=None, lazy=False): data = self._sample_data( dtype=dtype, masked=masked, fill_value=fill_value, lazy=lazy ) @@ -355,9 +341,7 @@ def test_multidims_weights_none(self): def test_non_lazy_aggregator(self): # An aggregator which doesn't have a lazy function should still work. - dummy_agg = Aggregator( - "custom_op", lambda x, axis=None: np.mean(x, axis=axis) - ) + dummy_agg = Aggregator("custom_op", lambda x, axis=None: np.mean(x, axis=axis)) result = self.cube.collapsed("x", dummy_agg) self.assertFalse(result.has_lazy_data()) self.assertArrayEqual(result.data, np.mean(self.data, axis=1)) @@ -393,9 +377,7 @@ def test_weighted_fullweights_real_y(self): cube_collapsed = self.cube_real.collapsed( "y", MEAN, weights=self.full_weights_y ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") @@ -405,31 +387,21 @@ def test_weighted_fullweights_lazy_y(self): "y", MEAN, weights=self.full_weights_y ) self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") def test_weighted_1dweights_real_y(self): # 1-D weights, real data : Check same results as full-shape. 
- cube_collapsed = self.cube_real.collapsed( - "y", MEAN, weights=self.y_weights - ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) + cube_collapsed = self.cube_real.collapsed("y", MEAN, weights=self.y_weights) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_1dweights_lazy_y(self): # 1-D weights, lazy data : Check lazy result, same values as real calc. - cube_collapsed = self.cube_lazy.collapsed( - "y", MEAN, weights=self.y_weights - ) + cube_collapsed = self.cube_lazy.collapsed("y", MEAN, weights=self.y_weights) self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") def test_weighted_fullweights_real_x(self): @@ -437,9 +409,7 @@ def test_weighted_fullweights_real_x(self): cube_collapsed = self.cube_real.collapsed( "x", MEAN, weights=self.full_weights_x ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") @@ -449,48 +419,34 @@ def test_weighted_fullweights_lazy_x(self): "x", MEAN, weights=self.full_weights_x ) self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_1dweights_real_x(self): # 1-D weights, real data, ** collapse X ** : as for 'y' case above - cube_collapsed = 
self.cube_real.collapsed( - "x", MEAN, weights=self.x_weights - ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) + cube_collapsed = self.cube_real.collapsed("x", MEAN, weights=self.x_weights) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_1dweights_lazy_x(self): # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above - cube_collapsed = self.cube_lazy.collapsed( - "x", MEAN, weights=self.x_weights - ) + cube_collapsed = self.cube_lazy.collapsed("x", MEAN, weights=self.x_weights) self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) + self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_sum_fullweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) - cube_collapsed = self.cube_real.collapsed( - "y", SUM, weights=self.full_weights_y - ) + cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.full_weights_y) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") def test_weighted_sum_fullweights_adapt_units_lazy_y(self): # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) - cube_collapsed = self.cube_lazy.collapsed( - "y", SUM, weights=self.full_weights_y - ) + cube_collapsed = self.cube_lazy.collapsed("y", SUM, weights=self.full_weights_y) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") @@ -498,9 +454,7 @@ def test_weighted_sum_1dweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) # Note: the 
same test with lazy data fails: # https://github.com/SciTools/iris/issues/5083 - cube_collapsed = self.cube_real.collapsed( - "y", SUM, weights=self.y_weights - ) + cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.y_weights) self.assertEqual(cube_collapsed.units, "kg m-2 s-1") self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") @@ -553,25 +507,19 @@ def setUp(self): def test_weighted_sum_fullweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) - cube_collapsed = self.cube_real.collapsed( - "y", SUM, weights=self.full_weights_y - ) + cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.full_weights_y) self.assertEqual(cube_collapsed.units, "kg s-1") def test_weighted_sum_fullweights_adapt_units_lazy_y(self): # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) - cube_collapsed = self.cube_lazy.collapsed( - "y", SUM, weights=self.full_weights_y - ) + cube_collapsed = self.cube_lazy.collapsed("y", SUM, weights=self.full_weights_y) self.assertEqual(cube_collapsed.units, "kg s-1") def test_weighted_sum_1dweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) # Note: the same test with lazy data fails: # https://github.com/SciTools/iris/issues/5083 - cube_collapsed = self.cube_real.collapsed( - "y", SUM, weights=self.y_weights - ) + cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.y_weights) self.assertEqual(cube_collapsed.units, "kg s-1") @@ -803,9 +751,7 @@ def test_collapsed_lon_with_3_bounds(self): def test_collapsed_lat_lon_with_3_bounds(self): """Collapse latitude and longitude with 3 bounds.""" with mock.patch("warnings.warn") as warn: - collapsed_cube = self.cube.collapsed( - ["latitude", "longitude"], SUM - ) + collapsed_cube = self.cube.collapsed(["latitude", "longitude"], SUM) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) @@ -837,9 +783,7 @@ def 
test_ancillary_variable(self): self.assertEqual(expected_summary, cube.summary()) def test_similar_coords(self): - coord1 = AuxCoord( - 42, long_name="foo", attributes=dict(bar=np.array([2, 5])) - ) + coord1 = AuxCoord(42, long_name="foo", attributes=dict(bar=np.array([2, 5]))) coord2 = coord1.copy() coord2.attributes = dict(bar="baz") for coord in [coord1, coord2]: @@ -865,16 +809,12 @@ def test_long_components(self): old_name = component.name() component.rename(long_name) new_summary = cube.summary() - component.rename( - old_name - ) # Put each back the way it was afterwards + component.rename(old_name) # Put each back the way it was afterwards # Check that the resulting 'stretched' output has dimension columns aligned correctly. lines = new_summary.split("\n") header = lines[0] - colon_inds = [ - i_char for i_char, char in enumerate(header) if char == ":" - ] + colon_inds = [i_char for i_char, char in enumerate(header) if char == ":"] for line in lines[1:]: # Replace all '-' with 'x' to make checking easier, and add a final buffer space. line = line.replace("-", "x") + " " @@ -882,9 +822,7 @@ def test_long_components(self): # For lines with any columns : check that columns are where expected for col_ind in colon_inds: # Chop out chars before+after each expected column. 
- self.assertEqual( - line[col_ind - 1 : col_ind + 2], " x " - ) + self.assertEqual(line[col_ind - 1 : col_ind + 2], " x ") # Finally also: compare old with new, but replacing new name and ignoring spacing differences def collapse_space(string): @@ -935,9 +873,7 @@ def setUp(self): self.cube.add_aux_coord(month_coord, 0) self.multi_dim_cube.add_dim_coord(val_coord, 0) self.multi_dim_cube.add_aux_coord(extra_coord, 1) - self.ancillary_variable = AncillaryVariable( - [0, 1, 2, 0, 1, 2], long_name="foo" - ) + self.ancillary_variable = AncillaryVariable([0, 1, 2, 0, 1, 2], long_name="foo") self.multi_dim_cube.add_ancillary_variable(self.ancillary_variable, 1) self.cell_measure = CellMeasure([0, 1, 2, 0, 1, 2], long_name="bar") self.multi_dim_cube.add_cell_measure(self.cell_measure, 1) @@ -956,9 +892,7 @@ def test_string_coord(self): units="s", ) month_coord = AuxCoord( - np.array( - ["jan|feb|mar", "feb|mar|apr", "mar|apr|may", "apr|may|jun"] - ), + np.array(["jan|feb|mar", "feb|mar|apr", "mar|apr|may", "apr|may|jun"]), bounds=np.array( [ ["jan", "mar"], @@ -978,9 +912,7 @@ def test_kwargs(self): self.cube.data = ma.array( self.cube.data, mask=([True, False, False, False, True, False]) ) - res_cube = self.cube.rolling_window( - "val", iris.analysis.MEAN, window, mdtol=0 - ) + res_cube = self.cube.rolling_window("val", iris.analysis.MEAN, window, mdtol=0) expected_result = ma.array( [-99.0, 1.5, 2.5, -99.0, -99.0], mask=[True, False, False, True, True], @@ -990,15 +922,11 @@ def test_kwargs(self): def test_ancillary_variables_and_cell_measures_kept(self): res_cube = self.multi_dim_cube.rolling_window("val", self.mock_agg, 3) - self.assertEqual( - res_cube.ancillary_variables(), [self.ancillary_variable] - ) + self.assertEqual(res_cube.ancillary_variables(), [self.ancillary_variable]) self.assertEqual(res_cube.cell_measures(), [self.cell_measure]) def test_ancillary_variables_and_cell_measures_removed(self): - res_cube = self.multi_dim_cube.rolling_window( - "extra", 
self.mock_agg, 3 - ) + res_cube = self.multi_dim_cube.rolling_window("extra", self.mock_agg, 3) self.assertEqual(res_cube.ancillary_variables(), []) self.assertEqual(res_cube.cell_measures(), []) @@ -1109,9 +1037,7 @@ class Test_slices_over(tests.IrisTest): def setUp(self): self.cube = stock.realistic_4d() # Define expected iterators for 1D and 2D test cases. - self.exp_iter_1d = range( - len(self.cube.coord("model_level_number").points) - ) + self.exp_iter_1d = range(len(self.cube.coord("model_level_number").points)) self.exp_iter_2d = np.ndindex(6, 70, 1, 1) # Define maximum number of interactions for particularly long # (and so time-consuming) iterators. @@ -1244,15 +1170,11 @@ def create_cube(lon_min, lon_max, bounds=False): data = as_lazy_data(data) cube = Cube(data, standard_name="x_wind", units="ms-1") cube.add_dim_coord( - iris.coords.DimCoord( - [0, 20, 40, 80], long_name="level_height", units="m" - ), + iris.coords.DimCoord([0, 20, 40, 80], long_name="level_height", units="m"), 0, ) cube.add_aux_coord( - iris.coords.AuxCoord( - [1.0, 0.9, 0.8, 0.6], long_name="sigma", units="1" - ), + iris.coords.AuxCoord([1.0, 0.9, 0.8, 0.6], long_name="sigma", units="1"), 0, ) cube.add_dim_coord( @@ -1365,9 +1287,7 @@ def test_real_data(self): cube.data result = cube.intersection(longitude=(170, 190)) self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) self.assertEqual(result.data[0, 0, 0], 170) self.assertEqual(result.data[0, 0, -1], 190) @@ -1376,9 +1296,7 @@ def test_real_data_wrapped(self): cube.data result = cube.intersection(longitude=(170, 190)) self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) self.assertEqual(result.data[0, 0, 0], 350) 
self.assertEqual(result.data[0, 0, -1], 10) @@ -1386,9 +1304,7 @@ def test_lazy_data(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190)) self.assertTrue(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) self.assertEqual(result.data[0, 0, 0], 170) self.assertEqual(result.data[0, 0, -1], 190) @@ -1396,9 +1312,7 @@ def test_lazy_data_wrapped(self): cube = create_cube(-180, 180) result = cube.intersection(longitude=(170, 190)) self.assertTrue(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) self.assertEqual(result.data[0, 0, 0], 350) self.assertEqual(result.data[0, 0, -1], 10) @@ -1407,13 +1321,9 @@ class Test_intersection_Points(tests.IrisTest): def test_ignore_bounds(self): cube = create_cube(0, 30, bounds=True) result = cube.intersection(longitude=(9.5, 12.5), ignore_bounds=True) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(10, 13) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(10, 13)) self.assertArrayEqual(result.coord("longitude").bounds[0], [9.5, 10.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [11.5, 12.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [11.5, 12.5]) # Check what happens with a regional, points-only circular intersection @@ -1422,33 +1332,25 @@ class Test_intersection__RegionalSrcModulus(tests.IrisTest): def test_request_subset(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(45, 50)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(45, 51) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(45, 51)) self.assertArrayEqual(result.data[0, 0], np.arange(5, 11)) def test_request_left(self): 
cube = create_cube(40, 60) result = cube.intersection(longitude=(35, 45)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(40, 46) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(40, 46)) self.assertArrayEqual(result.data[0, 0], np.arange(0, 6)) def test_request_right(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(55, 65)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(55, 60) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(55, 60)) self.assertArrayEqual(result.data[0, 0], np.arange(15, 20)) def test_request_superset(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(35, 65)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(40, 60) - ) + self.assertArrayEqual(result.coord("longitude").points, np.arange(40, 60)) self.assertArrayEqual(result.data[0, 0], np.arange(0, 20)) def test_request_subset_modulus(self): @@ -1515,9 +1417,7 @@ def test_global_wrapped_extreme_increasing_base_period(self): lons = cube.coord("longitude") # Redefine longitude so that points at (base + period) lons.points = np.linspace(-180.0, 180, lons.points.size) - result = cube.intersection( - longitude=(lons.points.min(), lons.points.max()) - ) + result = cube.intersection(longitude=(lons.points.min(), lons.points.max())) self.assertArrayEqual(result.data, cube.data) def test_global_wrapped_extreme_decreasing_base_period(self): @@ -1526,9 +1426,7 @@ def test_global_wrapped_extreme_decreasing_base_period(self): lons = cube.coord("longitude") # Redefine longitude so that points at (base + period) lons.points = np.linspace(180.0, -180.0, lons.points.size) - result = cube.intersection( - longitude=(lons.points.min(), lons.points.max()) - ) + result = cube.intersection(longitude=(lons.points.min(), lons.points.max())) self.assertArrayEqual(result.data, cube.data) def test_global(self): @@ -1549,9 +1447,7 @@ def test_global_wrapped(self): def 
test_aux_coord(self): cube = create_cube(0, 360) - cube.replace_coord( - iris.coords.AuxCoord.from_coord(cube.coord("longitude")) - ) + cube.replace_coord(iris.coords.AuxCoord.from_coord(cube.coord("longitude"))) result = cube.intersection(longitude=(0, 360)) self.assertEqual(result.coord("longitude").points[0], 0) self.assertEqual(result.coord("longitude").points[-1], 359) @@ -1560,9 +1456,7 @@ def test_aux_coord(self): def test_aux_coord_wrapped(self): cube = create_cube(0, 360) - cube.replace_coord( - iris.coords.AuxCoord.from_coord(cube.coord("longitude")) - ) + cube.replace_coord(iris.coords.AuxCoord.from_coord(cube.coord("longitude"))) result = cube.intersection(longitude=(-180, 180)) self.assertEqual(result.coord("longitude").points[0], 0) self.assertEqual(result.coord("longitude").points[-1], -1) @@ -1704,41 +1598,29 @@ def test_global_wrapped_extreme_increasing_base_period(self): # Ensure that we can correctly handle bounds defined at (base + period) cube = create_cube(-180.0, 180.0, bounds=True) lons = cube.coord("longitude") - result = cube.intersection( - longitude=(lons.bounds.min(), lons.bounds.max()) - ) + result = cube.intersection(longitude=(lons.bounds.min(), lons.bounds.max())) self.assertArrayEqual(result.data, cube.data) def test_global_wrapped_extreme_decreasing_base_period(self): # Ensure that we can correctly handle bounds defined at (base + period) cube = create_cube(180.0, -180.0, bounds=True) lons = cube.coord("longitude") - result = cube.intersection( - longitude=(lons.bounds.min(), lons.bounds.max()) - ) + result = cube.intersection(longitude=(lons.bounds.min(), lons.bounds.max())) self.assertArrayEqual(result.data, cube.data) def test_misaligned_points_inside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(169.75, 190.25)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [169.5, 170.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [189.5, 190.5] - ) + 
self.assertArrayEqual(result.coord("longitude").bounds[0], [169.5, 170.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [189.5, 190.5]) self.assertEqual(result.data[0, 0, 0], 170) self.assertEqual(result.data[0, 0, -1], 190) def test_misaligned_points_outside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(170.25, 189.75)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [169.5, 170.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [189.5, 190.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [169.5, 170.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [189.5, 190.5]) self.assertEqual(result.data[0, 0, 0], 170) self.assertEqual(result.data[0, 0, -1], 190) @@ -1746,46 +1628,32 @@ def test_misaligned_bounds(self): cube = create_cube(-180, 180, bounds=True) result = cube.intersection(longitude=(0, 360)) self.assertArrayEqual(result.coord("longitude").bounds[0], [-0.5, 0.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [358.5, 359.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [358.5, 359.5]) self.assertEqual(result.data[0, 0, 0], 180) self.assertEqual(result.data[0, 0, -1], 179) def test_misaligned_bounds_decreasing(self): cube = create_cube(180, -180, bounds=True) result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [359.5, 358.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [359.5, 358.5]) self.assertArrayEqual(result.coord("longitude").points[-1], 0) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [0.5, -0.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [0.5, -0.5]) self.assertEqual(result.data[0, 0, 0], 181) self.assertEqual(result.data[0, 0, -1], 180) def test_aligned_inclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(170.5, 189.5)) - 
self.assertArrayEqual( - result.coord("longitude").bounds[0], [169.5, 170.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [189.5, 190.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [169.5, 170.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [189.5, 190.5]) self.assertEqual(result.data[0, 0, 0], 170) self.assertEqual(result.data[0, 0, -1], 190) def test_aligned_exclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(170.5, 189.5, False, False)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [170.5, 171.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [188.5, 189.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [170.5, 171.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [188.5, 189.5]) self.assertEqual(result.data[0, 0, 0], 171) self.assertEqual(result.data[0, 0, -1], 189) @@ -1800,9 +1668,7 @@ def test_aligned_bounds_at_modulus(self): def test_negative_aligned_bounds_at_modulus(self): cube = create_cube(0.5, 360.5, bounds=True) result = cube.intersection(longitude=(-180, 180)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-180, -179] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [-180, -179]) self.assertArrayEqual(result.coord("longitude").bounds[-1], [179, 180]) self.assertEqual(result.data[0, 0, 0], 180) self.assertEqual(result.data[0, 0, -1], 179) @@ -1810,60 +1676,40 @@ def test_negative_aligned_bounds_at_modulus(self): def test_negative_misaligned_points_inside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-10.25, 10.25)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-10.5, -9.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [9.5, 10.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [-10.5, -9.5]) + 
self.assertArrayEqual(result.coord("longitude").bounds[-1], [9.5, 10.5]) self.assertEqual(result.data[0, 0, 0], 350) self.assertEqual(result.data[0, 0, -1], 10) def test_negative_misaligned_points_outside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-9.75, 9.75)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-10.5, -9.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [9.5, 10.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [-10.5, -9.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [9.5, 10.5]) self.assertEqual(result.data[0, 0, 0], 350) self.assertEqual(result.data[0, 0, -1], 10) def test_negative_aligned_inclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-10.5, 10.5)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-11.5, -10.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [10.5, 11.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [-11.5, -10.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [10.5, 11.5]) self.assertEqual(result.data[0, 0, 0], 349) self.assertEqual(result.data[0, 0, -1], 11) def test_negative_aligned_exclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-10.5, 10.5, False, False)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-10.5, -9.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [9.5, 10.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [-10.5, -9.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [9.5, 10.5]) self.assertEqual(result.data[0, 0, 0], 350) self.assertEqual(result.data[0, 0, -1], 10) def test_decrementing(self): cube = create_cube(360, 0, bounds=True) result = cube.intersection(longitude=(40, 60)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [60.5, 59.5] - ) - 
self.assertArrayEqual( - result.coord("longitude").bounds[-1], [40.5, 39.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [60.5, 59.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [40.5, 39.5]) self.assertEqual(result.data[0, 0, 0], 300) self.assertEqual(result.data[0, 0, -1], 320) @@ -1871,9 +1717,7 @@ def test_decrementing_wrapped(self): cube = create_cube(360, 0, bounds=True) result = cube.intersection(longitude=(-10, 10)) self.assertArrayEqual(result.coord("longitude").bounds[0], [10.5, 9.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [-9.5, -10.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [-9.5, -10.5]) self.assertEqual(result.data[0, 0, 0], 350) self.assertEqual(result.data[0, 0, -1], 10) @@ -1915,16 +1759,10 @@ def test_numerical_tolerance_wrapped(self): def test_ignore_bounds_wrapped(self): # Test `ignore_bounds` fully ignores bounds when wrapping cube = create_cube(0, 360, bounds=True) - result = cube.intersection( - longitude=(10.25, 370.25), ignore_bounds=True - ) + result = cube.intersection(longitude=(10.25, 370.25), ignore_bounds=True) # Expect points 11..370 not bounds [9.5, 10.5] .. 
[368.5, 369.5] - self.assertArrayEqual( - result.coord("longitude").bounds[0], [10.5, 11.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [369.5, 370.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[0], [10.5, 11.5]) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [369.5, 370.5]) self.assertEqual(result.data[0, 0, 0], 11) self.assertEqual(result.data[0, 0, -1], 10) @@ -1959,9 +1797,7 @@ def test_threshold_wrapped(self): cube = create_cube(-180, 180, bounds=True) result = cube.intersection(longitude=(0.4, 360.4), threshold=0.2) self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [359.5, 360.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [359.5, 360.5]) self.assertEqual(result.data[0, 0, 0], 181) self.assertEqual(result.data[0, 0, -1], 180) @@ -1971,9 +1807,7 @@ def test_threshold_wrapped_gap(self): cube = create_cube(-180, 180, bounds=True) result = cube.intersection(longitude=(0.4, 360.35), threshold=0.2) self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [359.5, 360.5] - ) + self.assertArrayEqual(result.coord("longitude").bounds[-1], [359.5, 360.5]) self.assertEqual(result.data[0, 0, 0], 181) self.assertEqual(result.data[0, 0, -1], 180) @@ -1982,9 +1816,7 @@ def unrolled_cube(): data = np.arange(5, dtype="f4") cube = Cube(data) cube.add_aux_coord( - iris.coords.AuxCoord( - [5.0, 10.0, 8.0, 5.0, 3.0], "longitude", units="degrees" - ), + iris.coords.AuxCoord([5.0, 10.0, 8.0, 5.0, 3.0], "longitude", units="degrees"), 0, ) cube.add_aux_coord( @@ -2005,17 +1837,13 @@ def test_subset(self): def test_subset_wrapped(self): cube = unrolled_cube() result = cube.intersection(longitude=(5 + 360, 8 + 360)) - self.assertArrayEqual( - result.coord("longitude").points, [365, 368, 365] - ) + 
self.assertArrayEqual(result.coord("longitude").points, [365, 368, 365]) self.assertArrayEqual(result.data, [0, 2, 3]) def test_superset(self): cube = unrolled_cube() result = cube.intersection(longitude=(0, 15)) - self.assertArrayEqual( - result.coord("longitude").points, [5, 10, 8, 5, 3] - ) + self.assertArrayEqual(result.coord("longitude").points, [5, 10, 8, 5, 3]) self.assertArrayEqual(result.data, np.arange(5)) @@ -2032,12 +1860,8 @@ def setUp(self): def test_api(self): sample_points = (("foo", 0.5), ("bar", 0.6)) - result = self.cube.interpolate( - sample_points, self.scheme, self.collapse_coord - ) - self.scheme.interpolator.assert_called_once_with( - self.cube, ("foo", "bar") - ) + result = self.cube.interpolate(sample_points, self.scheme, self.collapse_coord) + self.scheme.interpolator.assert_called_once_with(self.cube, ("foo", "bar")) self.interpolator.assert_called_once_with( (0.5, 0.6), collapse_scalar=self.collapse_coord ) @@ -2078,9 +1902,7 @@ def _check_copy(self, cube, cube_copy): self.assertMaskedArrayEqual(cube_copy.data, cube.data) if cube.data.mask is not ma.nomask: # "No mask" is a constant : all other cases must be distinct. 
- self.assertIsNot( - cube_copy.core_data().mask, cube.core_data().mask - ) + self.assertIsNot(cube_copy.core_data().mask, cube.core_data().mask) else: self.assertArrayEqual(cube_copy.data, cube.data) @@ -2611,9 +2433,7 @@ def test_different_coordinate(self): def test_different_coordinate_vector(self): cube = Cube([0, 1], long_name="raspberry", units="1") - cube.add_dim_coord( - DimCoord([0, 1], long_name="loganberry", units="1"), 0 - ) + cube.add_dim_coord(DimCoord([0, 1], long_name="loganberry", units="1"), 0) different_coord = DimCoord([2], long_name="loganberry", units="1") result = cube.subset(different_coord) self.assertEqual(result, None) @@ -2639,9 +2459,7 @@ def test_add_aux_coord(self): def test_add_cell_measure(self): cube = Cube(np.arange(6).reshape(2, 3)) - a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area" - ) + a_cell_measure = CellMeasure(np.arange(6).reshape(2, 3), long_name="area") cube.add_cell_measure(a_cell_measure, [0, 1]) self.assertEqual(cube.cell_measure("area"), a_cell_measure) @@ -2675,9 +2493,7 @@ def test_error_for_add_invalid_aux_factory(self): cube.add_aux_coord(sigma, 0) # Note orography is not added to the cube here factory = HybridHeightFactory(delta=delta, sigma=sigma, orography=orog) - expected_error = ( - "foo coordinate for factory is not present on cube " "bar" - ) + expected_error = "foo coordinate for factory is not present on cube bar" with self.assertRaisesRegex(ValueError, expected_error): cube.add_aux_factory(factory) @@ -2689,9 +2505,7 @@ def setUp(self): cube.add_dim_coord(x_coord, 1) z_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="z") cube.add_aux_coord(z_coord, [0, 1]) - a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area" - ) + a_cell_measure = CellMeasure(np.arange(6).reshape(2, 3), long_name="area") self.b_cell_measure = CellMeasure( np.arange(6).reshape(2, 3), long_name="other_area" ) @@ -2777,9 +2591,7 @@ def setUp(self): 
cube.add_dim_coord(y_coord, 0) z_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="z") cube.add_aux_coord(z_coord, [0, 1]) - a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area" - ) + a_cell_measure = CellMeasure(np.arange(6).reshape(2, 3), long_name="area") cube.add_cell_measure(a_cell_measure, [0, 1]) self.cube = cube @@ -2812,16 +2624,12 @@ def setUp(self): def test_ancillary_variables_2d(self): result = self.cube[0:2, 0:2] self.assertEqual(len(result.ancillary_variables()), 1) - self.assertEqual( - result.shape, result.ancillary_variables()[0].data.shape - ) + self.assertEqual(result.shape, result.ancillary_variables()[0].data.shape) def test_ancillary_variables_1d(self): result = self.cube[0, 0:2] self.assertEqual(len(result.ancillary_variables()), 1) - self.assertEqual( - result.shape, result.ancillary_variables()[0].data.shape - ) + self.assertEqual(result.shape, result.ancillary_variables()[0].data.shape) class TestAncillaryVariables(tests.IrisTest): @@ -2870,9 +2678,7 @@ def test_fail_ancill_variable_dims(self): self.cube.ancillary_variable_dims(ancillary_variable) def test_ancillary_variable_dims_by_name(self): - ancill_var_dims = self.cube.ancillary_variable_dims( - "number_of_observations" - ) + ancill_var_dims = self.cube.ancillary_variable_dims("number_of_observations") self.assertEqual(ancill_var_dims, (0, 1)) def test_fail_ancillary_variable_dims_by_name(self): @@ -2989,22 +2795,16 @@ def test_dim_coords(self): self.assertEqual(self.cube._dim_coords_and_dims, [(x_coord, 2)]) def test_aux_coords(self): - x_coord = AuxCoord( - points=np.array([[2, 3], [8, 4], [7, 9]]), long_name="x" - ) + x_coord = AuxCoord(points=np.array([[2, 3], [8, 4], [7, 9]]), long_name="x") self.cube.add_aux_coord(x_coord, (0, 1)) self.cube.transpose() self.assertEqual(self.cube._aux_coords_and_dims, [(x_coord, (2, 1))]) def test_cell_measures(self): - area_cm = CellMeasure( - np.arange(12).reshape(3, 4), long_name="area of cells" - ) + 
area_cm = CellMeasure(np.arange(12).reshape(3, 4), long_name="area of cells") self.cube.add_cell_measure(area_cm, (0, 2)) self.cube.transpose() - self.assertEqual( - self.cube._cell_measures_and_dims, [(area_cm, (2, 0))] - ) + self.assertEqual(self.cube._cell_measures_and_dims, [(area_cm, (2, 0))]) def test_ancillary_variables(self): ancill_var = AncillaryVariable( @@ -3223,9 +3023,7 @@ def test_cell_measure_name_found(self, simplecube): assert res == simplecube.cell_measure("cell_area") def test_cell_measure_instance_found(self, simplecube): - res = simplecube._dimensional_metadata( - simplecube.cell_measure("cell_area") - ) + res = simplecube._dimensional_metadata(simplecube.cell_measure("cell_area")) assert res == simplecube.cell_measure("cell_area") def test_ancillary_var_name_found(self, simplecube): @@ -3251,9 +3049,7 @@ def test_two_with_same_name_specify_instance(self, simplecube): # we specify the _DimensionalMetadata instance to ensure it returns the # correct one. simplecube.cell_measure("cell_area").rename("wibble") - res = simplecube._dimensional_metadata( - simplecube.cell_measure("wibble") - ) + res = simplecube._dimensional_metadata(simplecube.cell_measure("wibble")) assert res == simplecube.cell_measure("wibble") @@ -3274,17 +3070,13 @@ class TestReprs: def patched_cubeprinter(self): target = "iris._representation.cube_printout.CubePrinter" instance_mock = mock.MagicMock( - to_string=mock.MagicMock( - return_value="" - ) # NB this must return a string + to_string=mock.MagicMock(return_value="") # NB this must return a string ) with mock.patch(target, return_value=instance_mock) as class_mock: yield class_mock, instance_mock @staticmethod - def _check_expected_effects( - simplecube, patched_cubeprinter, oneline, padding - ): + def _check_expected_effects(simplecube, patched_cubeprinter, oneline, padding): class_mock, instance_mock = patched_cubeprinter assert class_mock.call_args_list == [ # "CubePrinter()" was called exactly once, with the cube as 
arg @@ -3336,9 +3128,7 @@ class TestHtmlRepr: def patched_cubehtml(self): target = "iris.experimental.representation.CubeRepresentation" instance_mock = mock.MagicMock( - repr_html=mock.MagicMock( - return_value="" - ) # NB this must return a string + repr_html=mock.MagicMock(return_value="") # NB this must return a string ) with mock.patch(target, return_value=instance_mock) as class_mock: yield class_mock, instance_mock @@ -3429,9 +3219,7 @@ def test_fail_assign_duckcellmethod(self): # Can't currently assign a "duck-typed" CellMethod replacement, since # implementation requires class membership (boo!) DuckCellMethod = namedtuple("DuckCellMethod", CellMethod._names) - test_object = DuckCellMethod( - *CellMethod._names - ) # fill props with value==name + test_object = DuckCellMethod(*CellMethod._names) # fill props with value==name with pytest.raises(ValueError, match="not an iris.coords.CellMethod"): self.cube.cell_methods = (test_object,) diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py index 615de7b8e6..ccf9691e78 100644 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -16,9 +16,7 @@ @pytest.fixture def sample_attrs() -> CubeAttrsDict: - return CubeAttrsDict( - locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"} - ) + return CubeAttrsDict(locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"}) def check_content(attrs, locals=None, globals=None, matches=None): @@ -179,9 +177,7 @@ def test__fromkeys(self, value, inputtype): else: assert inputtype == "split_arg" # Check when input is a CubeAttrsDict - keys = CubeAttrsDict( - globals={"a": 1}, locals={"b": 2, "history": 3} - ) + keys = CubeAttrsDict(globals={"a": 1}, locals={"b": 2, "history": 3}) # The result preserves the input keys' local/global identity # N.B. "history" would be global by default (cf. 
"regular_arg" case) expected = CubeAttrsDict( diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 386df39b66..1f830c3398 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -46,19 +46,13 @@ def test_fail(self): class Test_concatenate_cube(tests.IrisTest): def setUp(self): - self.units = Unit( - "days since 1970-01-01 00:00:00", calendar="standard" - ) + self.units = Unit("days since 1970-01-01 00:00:00", calendar="standard") self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube1.add_dim_coord( - DimCoord([0, 1, 2], "time", units=self.units), 0 - ) + self.cube1.add_dim_coord(DimCoord([0, 1, 2], "time", units=self.units), 0) def test_pass(self): self.cube2 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube2.add_dim_coord( - DimCoord([3, 4, 5], "time", units=self.units), 0 - ) + self.cube2.add_dim_coord(DimCoord([3, 4, 5], "time", units=self.units), 0) result = CubeList([self.cube1, self.cube2]).concatenate_cube() self.assertIsInstance(result, Cube) @@ -71,17 +65,11 @@ def test_fail(self): def test_names_differ_fail(self): self.cube2 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube2.add_dim_coord( - DimCoord([3, 4, 5], "time", units=self.units), 0 - ) + self.cube2.add_dim_coord(DimCoord([3, 4, 5], "time", units=self.units), 0) self.cube3 = Cube([1, 2, 3], "air_pressure", units="Pa") - self.cube3.add_dim_coord( - DimCoord([3, 4, 5], "time", units=self.units), 0 - ) + self.cube3.add_dim_coord(DimCoord([3, 4, 5], "time", units=self.units), 0) exc_regexp = "Cube names differ: air_temperature != air_pressure" - with self.assertRaisesRegex( - iris.exceptions.ConcatenateError, exc_regexp - ): + with self.assertRaisesRegex(iris.exceptions.ConcatenateError, exc_regexp): CubeList([self.cube1, self.cube2, self.cube3]).concatenate_cube() def test_empty(self): @@ -158,13 +146,9 @@ def test_extract_two_dims(self): cubes = 
iris.cube.CubeList([self.cube[2:, 5:], self.cube[:4, :10]]) a, b = cubes.extract_overlapping(["time", "latitude"]) self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual( - a.coord("latitude"), self.cube.coord("latitude")[5:10] - ) + self.assertEqual(a.coord("latitude"), self.cube.coord("latitude")[5:10]) self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual( - b.coord("latitude"), self.cube.coord("latitude")[5:10] - ) + self.assertEqual(b.coord("latitude"), self.cube.coord("latitude")[5:10]) def test_different_orders(self): cubes = iris.cube.CubeList([self.cube[::-1][:4], self.cube[:4]]) @@ -248,9 +232,7 @@ def _make_cube(fp, rt, t, realization=None): cube = Cube(np.arange(20).reshape(4, 5)) cube.add_dim_coord(DimCoord(np.arange(5), long_name="x", units="1"), 1) cube.add_dim_coord(DimCoord(np.arange(4), long_name="y", units="1"), 0) - cube.add_aux_coord( - DimCoord(fp, standard_name="forecast_period", units="1") - ) + cube.add_aux_coord(DimCoord(fp, standard_name="forecast_period", units="1")) cube.add_aux_coord( DimCoord(rt, standard_name="forecast_reference_time", units="1") ) @@ -273,12 +255,8 @@ def test_orthogonal_with_realization(self): (1, 11, 1), (1, 11, 2), ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - en2_cubes = [ - self._make_cube(*triple, realization=2) for triple in triples - ] + en1_cubes = [self._make_cube(*triple, realization=1) for triple in triples] + en2_cubes = [self._make_cube(*triple, realization=2) for triple in triples] cubes = CubeList(en1_cubes) + CubeList(en2_cubes) (cube,) = cubes.merge() self.assertCML(cube, checksum=False) @@ -295,12 +273,8 @@ def test_combination_with_realization(self): (1, 11, 1), (1, 11, 2), ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - en2_cubes = [ - self._make_cube(*triple, realization=2) for triple in triples - ] + en1_cubes = [self._make_cube(*triple, realization=1) for 
triple in triples] + en2_cubes = [self._make_cube(*triple, realization=2) for triple in triples] cubes = CubeList(en1_cubes) + CubeList(en2_cubes) (cube,) = cubes.merge() self.assertCML(cube, checksum=False) @@ -317,12 +291,8 @@ def test_combination_with_extra_realization(self): (1, 11, 1), (1, 11, 2), ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - en2_cubes = [ - self._make_cube(*triple, realization=2) for triple in triples - ] + en1_cubes = [self._make_cube(*triple, realization=1) for triple in triples] + en2_cubes = [self._make_cube(*triple, realization=2) for triple in triples] # Add extra that is a duplicate of one of the time triples # but with a different realisation. en3_cubes = [self._make_cube(0, 10, 2, realization=3)] @@ -342,9 +312,7 @@ def test_combination_with_extra_triple(self): (1, 11, 1), (1, 11, 2), ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] + en1_cubes = [self._make_cube(*triple, realization=1) for triple in triples] # Add extra time triple on the end. 
en2_cubes = [ self._make_cube(*triple, realization=2) @@ -476,9 +444,7 @@ def test_single_cube_ok(self): self.check_extract([self.cube_x], self.cons_x, self.cube_x) def test_single_cube_fail__too_few(self): - self.check_extract( - [self.cube_x], self.cons_y, "Got 0 cubes .* expecting 1" - ) + self.check_extract([self.cube_x], self.cons_y, "Got 0 cubes .* expecting 1") def test_single_cube_fail__too_many(self): self.check_extract( @@ -566,9 +532,7 @@ def test_single_cube_ok(self): ) # NOTE: always returns list NOT cube def test_single_cube__fail_mismatch(self): - self.check_extract( - [self.cube_x], self.cons_y, "Got 0 cubes .* expecting 1" - ) + self.check_extract([self.cube_x], self.cons_y, "Got 0 cubes .* expecting 1") def test_multi_cube_ok(self): self.check_extract( @@ -687,27 +651,18 @@ def setUp(self): self.cubes = CubeList([iris.tests.stock.lat_lon_cube()]) def test_summary(self): - expected = ( - "0: unknown / (unknown) " - " (latitude: 3; longitude: 4)" - ) + expected = "0: unknown / (unknown) (latitude: 3; longitude: 4)" self.assertEqual(str(self.cubes), expected) def test_summary_name_unit(self): self.cubes[0].long_name = "aname" self.cubes[0].units = "1" - expected = ( - "0: aname / (1) " - " (latitude: 3; longitude: 4)" - ) + expected = "0: aname / (1) (latitude: 3; longitude: 4)" self.assertEqual(str(self.cubes), expected) def test_summary_stash(self): self.cubes[0].attributes["STASH"] = STASH.from_msi("m01s00i004") - expected = ( - "0: m01s00i004 / (unknown) " - " (latitude: 3; longitude: 4)" - ) + expected = "0: m01s00i004 / (unknown) (latitude: 3; longitude: 4)" self.assertEqual(str(self.cubes), expected) @@ -720,9 +675,7 @@ def test_realise_data(self): call_patch = self.patch("iris._lazy_data.co_realise_cubes") test_cubelist.realise_data() # Check it was called once, passing cubes as *args. 
- self.assertEqual( - call_patch.call_args_list, [mock.call(*mock_cubes_list)] - ) + self.assertEqual(call_patch.call_args_list, [mock.call(*mock_cubes_list)]) class Test_CubeList_copy(tests.IrisTest): diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 854a0d431a..517e684d46 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -28,9 +28,7 @@ class Test_aggregated_by(tests.IrisTest): def setUp(self): self.cube = Cube(np.arange(44).reshape(4, 11)) - val_coord = AuxCoord( - [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" - ) + val_coord = AuxCoord([0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val") label_coord = AuxCoord( [ "alpha", @@ -49,9 +47,7 @@ def setUp(self): units="no_unit", ) simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg") - spanning_coord = AuxCoord( - np.arange(44).reshape(4, 11), long_name="spanning" - ) + spanning_coord = AuxCoord(np.arange(44).reshape(4, 11), long_name="spanning") spanning_label_coord = AuxCoord( np.arange(1, 441, 10).reshape(4, 11).astype(str), long_name="span_label", @@ -88,9 +84,7 @@ def mock_weighted_aggregate(*_, **kwargs): side_effect=lambda x, y, z, **kwargs: y ) - self.ancillary_variable = AncillaryVariable( - [0, 1, 2, 3], long_name="foo" - ) + self.ancillary_variable = AncillaryVariable([0, 1, 2, 3], long_name="foo") self.cube.add_ancillary_variable(self.ancillary_variable, 0) self.cell_measure = CellMeasure([0, 1, 2, 3], long_name="bar") self.cube.add_cell_measure(self.cell_measure, 0) @@ -105,9 +99,7 @@ def test_2d_coord_simple_agg(self): for res_slice, cube_slice in zip( res_cube.slices("simple_agg"), self.cube.slices("simple_agg") ): - cube_slice_agg = cube_slice.aggregated_by( - "simple_agg", self.mock_agg - ) + cube_slice_agg = cube_slice.aggregated_by("simple_agg", self.mock_agg) self.assertEqual( res_slice.coord("spanning"), 
cube_slice_agg.coord("spanning") ) @@ -139,9 +131,7 @@ def test_agg_by_label_bounded(self): # and val entries are not in step; the resulting cube has a val # coord of bounded cells and a label coord of single string entries. val_points = self.cube.coord("val").points - self.cube.coord("val").bounds = np.array( - [val_points - 0.5, val_points + 0.5] - ).T + self.cube.coord("val").bounds = np.array([val_points - 0.5, val_points + 0.5]).T res_cube = self.cube.aggregated_by("label", self.mock_agg) val_coord = AuxCoord( np.array([1.0, 0.5, 1.0]), @@ -205,15 +195,11 @@ def test_single_string_aggregation(self): ) result = cube.aggregated_by("foo", MEAN) self.assertEqual(result.shape, (2, 4)) - self.assertEqual( - result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") - ) + self.assertEqual(result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar")) def test_ancillary_variables_and_cell_measures_kept(self): cube_agg = self.cube.aggregated_by("val", self.mock_agg) - self.assertEqual( - cube_agg.ancillary_variables(), [self.ancillary_variable] - ) + self.assertEqual(cube_agg.ancillary_variables(), [self.ancillary_variable]) self.assertEqual(cube_agg.cell_measures(), [self.cell_measure]) def test_ancillary_variables_and_cell_measures_removed(self): @@ -274,9 +260,7 @@ def test_1d_weights(self): ) def test_2d_weights(self): - self.cube.aggregated_by( - "val", self.mock_weighted_agg, weights=self.val_weights - ) + self.cube.aggregated_by("val", self.mock_weighted_agg, weights=self.val_weights) self.assertEqual(self.mock_weighted_agg.aggregate.call_count, 3) @@ -296,9 +280,7 @@ def test_2d_weights(self): ), ) self.assertEqual(call_1.kwargs["axis"], 1) - np.testing.assert_array_almost_equal( - call_1.kwargs["weights"], np.ones((4, 6)) - ) + np.testing.assert_array_almost_equal(call_1.kwargs["weights"], np.ones((4, 6))) call_2 = self.mock_weighted_agg.aggregate.mock_calls[1] np.testing.assert_array_equal( @@ -306,18 +288,14 @@ def test_2d_weights(self): np.array([[3, 4, 
10], [14, 15, 21], [25, 26, 32], [36, 37, 43]]), ) self.assertEqual(call_2.kwargs["axis"], 1) - np.testing.assert_array_almost_equal( - call_2.kwargs["weights"], np.ones((4, 3)) - ) + np.testing.assert_array_almost_equal(call_2.kwargs["weights"], np.ones((4, 3))) call_3 = self.mock_weighted_agg.aggregate.mock_calls[2] np.testing.assert_array_equal( call_3.args[0], np.array([[5, 8], [16, 19], [27, 30], [38, 41]]) ) self.assertEqual(call_3.kwargs["axis"], 1) - np.testing.assert_array_almost_equal( - call_3.kwargs["weights"], np.ones((4, 2)) - ) + np.testing.assert_array_almost_equal(call_3.kwargs["weights"], np.ones((4, 2))) def test_returned(self): output = self.cube.aggregated_by( @@ -358,9 +336,7 @@ def setUp(self): self.lazydata = as_lazy_data(self.data) self.cube = Cube(self.lazydata) - val_coord = AuxCoord( - [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" - ) + val_coord = AuxCoord([0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val") label_coord = AuxCoord( [ "alpha", @@ -456,9 +432,7 @@ def test_single_string_aggregation__lazy(self): result = cube.aggregated_by("foo", MEAN) self.assertTrue(result.has_lazy_data()) self.assertEqual(result.shape, (2, 4)) - self.assertEqual( - result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") - ) + self.assertEqual(result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar")) self.assertArrayEqual(result.data, means) self.assertFalse(result.has_lazy_data()) @@ -487,16 +461,12 @@ def test_1d_weights__lazy(self): 146.0, 150.0, ] - np.testing.assert_array_almost_equal( - cube_agg.data, np.array([row_0, row_1]) - ) + np.testing.assert_array_almost_equal(cube_agg.data, np.array([row_0, row_1])) def test_2d_weights__lazy(self): self.assertTrue(self.cube.has_lazy_data()) - cube_agg = self.cube.aggregated_by( - "val", SUM, weights=self.val_weights - ) + cube_agg = self.cube.aggregated_by("val", SUM, weights=self.val_weights) self.assertTrue(self.cube.has_lazy_data()) self.assertTrue(cube_agg.has_lazy_data()) @@ -544,9 +514,7 
@@ def test_returned__lazy(self): 146.0, 150.0, ] - np.testing.assert_array_almost_equal( - cube.data, np.array([row_0, row_1]) - ) + np.testing.assert_array_almost_equal(cube.data, np.array([row_0, row_1])) weights = output[1] self.assertEqual(weights.shape, (2, 11)) @@ -613,9 +581,7 @@ def get_result( ) if second_categorised: - categorised_coord2 = AuxCoord( - np.tile([0, 1, 2, 3, 4], 4), long_name="cat2" - ) + categorised_coord2 = AuxCoord(np.tile([0, 1, 2, 3, 4], 4), long_name="cat2") categorised_coords = [categorised_coord1, categorised_coord2] else: categorised_coords = categorised_coord1 @@ -660,9 +626,7 @@ def test_basic(self): aligned_coord = result.coord("aligned") self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual( - aligned_coord.bounds, np.array([[0, 18], [1, 19]]) - ) + self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) self.assertTrue(aligned_coord.climatological) self.assertIn(aligned_coord, result.dim_coords) @@ -680,9 +644,7 @@ def test_2d_other_coord(self): aligned_coord = result.coord("aligned") self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual( - aligned_coord.bounds, np.array([[0, 18], [1, 19]]) - ) + self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) self.assertTrue(aligned_coord.climatological) part_aligned_coord = result.coord("part_aligned") @@ -691,9 +653,7 @@ def test_2d_other_coord(self): ) self.assertArrayEqual( part_aligned_coord.bounds, - np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape( - 2, 5, 2 - ), + np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape(2, 5, 2), ) self.assertFalse(part_aligned_coord.climatological) @@ -708,20 +668,14 @@ def test_2d_timelike_other_coord(self): aligned_coord = result.coord("aligned") self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual( - aligned_coord.bounds, np.array([[0, 18], [1, 19]]) - ) + self.assertArrayEqual(aligned_coord.bounds, 
np.array([[0, 18], [1, 19]])) self.assertTrue(aligned_coord.climatological) part_aligned_coord = result.coord("part_aligned") - self.assertArrayEqual( - part_aligned_coord.points, np.arange(1, 11).reshape(2, 5) - ) + self.assertArrayEqual(part_aligned_coord.points, np.arange(1, 11).reshape(2, 5)) self.assertArrayEqual( part_aligned_coord.bounds, - np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape( - 2, 5, 2 - ), + np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape(2, 5, 2), ) self.assertTrue(part_aligned_coord.climatological) @@ -733,9 +687,7 @@ def test_transposed(self): aligned_coord = result.coord("aligned") self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual( - aligned_coord.bounds, np.array([[0, 18], [1, 19]]) - ) + self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) self.assertTrue(aligned_coord.climatological) categorised_coord = result.coord("cat1") @@ -769,16 +721,12 @@ def test_multiple_agg_coords(self): self.assertTrue(aligned_coord.climatological) categorised_coord1 = result.coord("cat1") - self.assertArrayEqual( - categorised_coord1.points, np.tile(np.arange(2), 5) - ) + self.assertArrayEqual(categorised_coord1.points, np.tile(np.arange(2), 5)) self.assertIsNone(categorised_coord1.bounds) self.assertFalse(categorised_coord1.climatological) categorised_coord2 = result.coord("cat2") - self.assertArrayEqual( - categorised_coord2.points, np.tile(np.arange(5), 2) - ) + self.assertArrayEqual(categorised_coord2.points, np.tile(np.arange(5), 2)) self.assertIsNone(categorised_coord2.bounds) self.assertFalse(categorised_coord2.climatological) @@ -791,9 +739,7 @@ def test_non_climatological_units(self): aligned_coord = result.coord("aligned") self.assertArrayEqual(aligned_coord.points, np.arange(9, 11)) - self.assertArrayEqual( - aligned_coord.bounds, np.array([[0, 18], [1, 19]]) - ) + self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) 
self.assertFalse(aligned_coord.climatological) def test_clim_in_clim_op(self): @@ -842,9 +788,7 @@ def test_clim_in_no_clim_op(self): class Test_aggregated_by__derived(tests.IrisTest): def setUp(self): self.cube = realistic_4d()[:, :10, :6, :8] - self.time_cat_coord = AuxCoord( - [0, 0, 1, 1, 2, 2], long_name="time_cat" - ) + self.time_cat_coord = AuxCoord([0, 0, 1, 1, 2, 2], long_name="time_cat") self.cube.add_aux_coord(self.time_cat_coord, 0) height_data = np.zeros(self.cube.shape[1]) height_data[5:] = 1 diff --git a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py index af726c0fa9..5795736db7 100644 --- a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py +++ b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py @@ -176,9 +176,7 @@ def test_(self): with self.temp_filename(".tif") as temp_filename: export_geotiff(cube, temp_filename) dataset = gdal.Open(temp_filename, gdal.GA_ReadOnly) - self.assertEqual( - dataset.GetGeoTransform(), (-12.5, 5, 0, 55, 0, -10) - ) + self.assertEqual(dataset.GetGeoTransform(), (-12.5, 5, 0, 55, 0, -10)) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py index aadb07c882..7515fad08a 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py @@ -86,8 +86,7 @@ def test_names(self): # Check the dimension names used as column headings are split out and # formatted correctly. 
expected_coord_names = [ - c.name().replace("_", " ") - for c in self.cube.coords(dim_coords=True) + c.name().replace("_", " ") for c in self.cube.coords(dim_coords=True) ] result_coord_names = self.representer.names[1:] for result in result_coord_names: @@ -146,9 +145,7 @@ def test_headings__dimcoords(self): def test_headings__auxcoords(self): contents = self.representer.sections_data["Auxiliary coordinates:"] content_str = ",".join(content for content in contents) - aux_coords = [ - c.name() for c in self.cube.aux_coords if c.shape != (1,) - ] + aux_coords = [c.name() for c in self.cube.aux_coords if c.shape != (1,)] for coord in aux_coords: self.assertIn(coord, content_str) @@ -162,18 +159,14 @@ def test_headings__derivedcoords(self): def test_headings__cellmeasures(self): contents = self.representer.sections_data["Cell measures:"] content_str = ",".join(content for content in contents) - cell_measures = [ - c.name() for c in self.cube.cell_measures() if c.shape != (1,) - ] + cell_measures = [c.name() for c in self.cube.cell_measures() if c.shape != (1,)] for coord in cell_measures: self.assertIn(coord, content_str) def test_headings__ancillaryvars(self): contents = self.representer.sections_data["Ancillary variables:"] content_str = ",".join(content for content in contents) - ancillary_variables = [ - c.name() for c in self.cube.ancillary_variables() - ] + ancillary_variables = [c.name() for c in self.cube.ancillary_variables()] for coord in ancillary_variables: self.assertIn(coord, content_str) @@ -189,9 +182,7 @@ def test_headings__scalarcellmeasures(self): def test_headings__scalarcoords(self): contents = self.representer.sections_data["Scalar coordinates:"] content_str = ",".join(content for content in contents) - scalar_coords = [ - c.name() for c in self.cube.coords() if c.shape == (1,) - ] + scalar_coords = [c.name() for c in self.cube.coords() if c.shape == (1,)] for coord in scalar_coords: self.assertIn(coord, content_str) @@ -370,9 +361,7 @@ def 
test_mesh_included(self): self.assertIn( 'Mesh', self.mesh_result ) - mesh_coord_names = [ - c.name() for c in self.mesh_cube.coords(mesh_coords=True) - ] + mesh_coord_names = [c.name() for c in self.mesh_cube.coords(mesh_coords=True)] for coord_name in mesh_coord_names: self.assertIn(coord_name, self.result) @@ -407,9 +396,7 @@ def _cube_stringattribute_html(name, attr): return result def test_simple_string_attribute(self): - html = self._cube_stringattribute_html( - "single-string", "single string" - ) + html = self._cube_stringattribute_html("single-string", "single string") self.assertString(html) def test_long_string_attribute(self): diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index a0db398257..7f425d371d 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -54,21 +54,15 @@ def test_broadcast_fail_tgt_levels(self): def test_standard_input(self): for axis in self.axes: - result = relevel( - self.cube, self.src_levels, [-1, 0, 5.5], axis=axis - ) - assert_array_equal( - result.data.flatten(), np.array([np.nan, 0, 55]) - ) + result = relevel(self.cube, self.src_levels, [-1, 0, 5.5], axis=axis) + assert_array_equal(result.data.flatten(), np.array([np.nan, 0, 55])) expected = DimCoord([-1, 0, 5.5], units=1, long_name="thingness") self.assertEqual(expected, result.coord("thingness")) def test_non_monotonic(self): for axis in self.axes: result = relevel(self.cube, self.src_levels, [2, 3, 2], axis=axis) - assert_array_equal( - result.data.flatten(), np.array([20, 30, np.nan]) - ) + assert_array_equal(result.data.flatten(), np.array([20, 30, np.nan])) expected = AuxCoord([2, 3, 2], units=1, long_name="thingness") self.assertEqual(result.coord("thingness"), expected) @@ -100,9 +94,7 @@ def test_custom_interpolator(self): axis=axis, interpolator=interpolator, ) - assert_array_equal( - 
result.data.flatten(), np.array([np.nan, 0, 120]) - ) + assert_array_equal(result.data.flatten(), np.array([np.nan, 0, 120])) def test_multi_dim_target_levels(self): interpolator = partial( diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index 22914215b7..79eb9aac1e 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -49,9 +49,7 @@ def test_cf_identities(self): } # ONLY expecting ref_subject, excluding ref_not_subject. expected = { - subject_name: CFUGridAuxiliaryCoordinateVariable( - subject_name, ref_subject - ) + subject_name: CFUGridAuxiliaryCoordinateVariable(subject_name, ref_subject) } for identity in self.cf_identities: @@ -65,8 +63,7 @@ def test_duplicate_refs(self): subject_name = "ref_subject" ref_subject = named_variable(subject_name) ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for var in ref_source_vars.values(): setattr(var, self.cf_identities[0], subject_name) @@ -80,22 +77,17 @@ def test_duplicate_refs(self): # ONLY expecting ref_subject, excluding ref_not_subject. 
expected = { - subject_name: CFUGridAuxiliaryCoordinateVariable( - subject_name, ref_subject - ) + subject_name: CFUGridAuxiliaryCoordinateVariable(subject_name, ref_subject) } result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) self.assertDictEqual(expected, result) def test_two_coords(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for ix, var in enumerate(ref_source_vars.values()): setattr(var, self.cf_identities[ix], subject_names[ix]) @@ -115,9 +107,7 @@ def test_two_coords(self): def test_two_part_ref(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source = named_variable("ref_source") setattr(ref_source, self.cf_identities[0], " ".join(subject_names)) @@ -149,13 +139,10 @@ def test_string_type_ignored(self): def test_ignore(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for ix, var in enumerate(ref_source_vars.values()): setattr(var, self.cf_identities[0], subject_names[ix]) @@ -179,9 +166,7 @@ def test_ignore(self): def test_target(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: 
named_variable(name) for name in subject_names} source_names = ("ref_source_1", "ref_source_2") ref_source_vars = {name: named_variable(name) for name in source_names} @@ -219,16 +204,14 @@ def operation(warn: bool): "emit at least 1 warning", category=iris.exceptions.IrisUserWarning, ) - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, warn=warn - ) + result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all, warn=warn) self.assertDictEqual({}, result) # Missing warning. - warn_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" - with pytest.warns( - iris.exceptions.IrisCfMissingVarWarning, match=warn_regex - ): + warn_regex = ( + rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" + ) + with pytest.warns(iris.exceptions.IrisCfMissingVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -237,12 +220,8 @@ def operation(warn: bool): # String variable warning. warn_regex = r".*is a CF-netCDF label variable.*" - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - with pytest.warns( - iris.exceptions.IrisCfLabelVarWarning, match=warn_regex - ): + vars_all[subject_name] = netcdf_ugrid_variable(subject_name, "", np.bytes_) + with pytest.warns(iris.exceptions.IrisCfLabelVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index 5fae20e6fc..1dd45c323b 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -42,9 +42,7 @@ def test_cf_identities(self): } # ONLY expecting ref_subject, excluding ref_not_subject. 
expected = { - subject_name: CFUGridConnectivityVariable( - subject_name, ref_subject - ) + subject_name: CFUGridConnectivityVariable(subject_name, ref_subject) } for identity in Connectivity.UGRID_CF_ROLES: @@ -58,8 +56,7 @@ def test_duplicate_refs(self): subject_name = "ref_subject" ref_subject = named_variable(subject_name) ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for var in ref_source_vars.values(): setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_name) @@ -73,22 +70,17 @@ def test_duplicate_refs(self): # ONLY expecting ref_subject, excluding ref_not_subject. expected = { - subject_name: CFUGridConnectivityVariable( - subject_name, ref_subject - ) + subject_name: CFUGridConnectivityVariable(subject_name, ref_subject) } result = CFUGridConnectivityVariable.identify(vars_all) self.assertDictEqual(expected, result) def test_two_cf_roles(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for ix, var in enumerate(ref_source_vars.values()): setattr(var, Connectivity.UGRID_CF_ROLES[ix], subject_names[ix]) @@ -111,9 +103,7 @@ def test_two_part_ref_ignored(self): # cf role - invalid UGRID. 
subject_name = "ref_subject" ref_source = named_variable("ref_source") - setattr( - ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name + " foo" - ) + setattr(ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name + " foo") vars_all = { subject_name: named_variable(subject_name), "ref_not_subject": named_variable("ref_not_subject"), @@ -138,13 +128,10 @@ def test_string_type_ignored(self): def test_ignore(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for ix, var in enumerate(ref_source_vars.values()): setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_names[ix]) @@ -161,16 +148,12 @@ def test_ignore(self): expected_name, ref_subject_vars[expected_name] ) } - result = CFUGridConnectivityVariable.identify( - vars_all, ignore=subject_names[1] - ) + result = CFUGridConnectivityVariable.identify(vars_all, ignore=subject_names[1]) self.assertDictEqual(expected, result) def test_target(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} source_names = ("ref_source_1", "ref_source_2") ref_source_vars = {name: named_variable(name) for name in source_names} @@ -189,9 +172,7 @@ def test_target(self): expected_name, ref_subject_vars[expected_name] ) } - result = CFUGridConnectivityVariable.identify( - vars_all, target=source_names[0] - ) + result = CFUGridConnectivityVariable.identify(vars_all, target=source_names[0]) self.assertDictEqual(expected, result) def test_warn(self): @@ -212,12 +193,8 @@ def operation(warn: bool): self.assertDictEqual({}, result) # Missing 
warning. - warn_regex = ( - rf"Missing CF-UGRID connectivity variable {subject_name}.*" - ) - with pytest.warns( - iris.exceptions.IrisCfMissingVarWarning, match=warn_regex - ): + warn_regex = rf"Missing CF-UGRID connectivity variable {subject_name}.*" + with pytest.warns(iris.exceptions.IrisCfMissingVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -226,12 +203,8 @@ def operation(warn: bool): # String variable warning. warn_regex = r".*is a CF-netCDF label variable.*" - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - with pytest.warns( - iris.exceptions.IrisCfLabelVarWarning, match=warn_regex - ): + vars_all[subject_name] = netcdf_ugrid_variable(subject_name, "", np.bytes_) + with pytest.warns(iris.exceptions.IrisCfLabelVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py index 1e707d9550..a252618e85 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py @@ -31,27 +31,19 @@ def setUp(self): def test_inherited(self): coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") self.cf_group[coord_var.cf_name] = coord_var - self.assertEqual( - coord_var, self.cf_group.coordinates[coord_var.cf_name] - ) + self.assertEqual(coord_var, self.cf_group.coordinates[coord_var.cf_name]) def test_connectivities(self): - conn_var = MagicMock( - spec=CFUGridConnectivityVariable, cf_name="conn_var" - ) + conn_var = MagicMock(spec=CFUGridConnectivityVariable, cf_name="conn_var") self.cf_group[conn_var.cf_name] = conn_var - self.assertEqual( - conn_var, self.cf_group.connectivities[conn_var.cf_name] - ) + self.assertEqual(conn_var, self.cf_group.connectivities[conn_var.cf_name]) def 
test_ugrid_coords(self): coord_var = MagicMock( spec=CFUGridAuxiliaryCoordinateVariable, cf_name="coord_var" ) self.cf_group[coord_var.cf_name] = coord_var - self.assertEqual( - coord_var, self.cf_group.ugrid_coords[coord_var.cf_name] - ) + self.assertEqual(coord_var, self.cf_group.ugrid_coords[coord_var.cf_name]) def test_meshes(self): mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") @@ -61,17 +53,13 @@ def test_meshes(self): def test_non_data_names(self): data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") - conn_var = MagicMock( - spec=CFUGridConnectivityVariable, cf_name="conn_var" - ) + conn_var = MagicMock(spec=CFUGridConnectivityVariable, cf_name="conn_var") ugrid_coord_var = MagicMock( spec=CFUGridAuxiliaryCoordinateVariable, cf_name="ugrid_coord_var" ) mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") mesh_var2 = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var2") - duplicate_name_var = MagicMock( - spec=CFUGridMeshVariable, cf_name="coord_var" - ) + duplicate_name_var = MagicMock(spec=CFUGridMeshVariable, cf_name="coord_var") for var in ( data_var, diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index 59d3a8aad9..10212f5ae5 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -66,9 +66,7 @@ def test_cf_identity(self): } # ONLY expecting ref_subject, excluding ref_not_subject. 
- expected = { - subject_name: CFUGridMeshVariable(subject_name, ref_subject) - } + expected = {subject_name: CFUGridMeshVariable(subject_name, ref_subject)} result = CFUGridMeshVariable.identify(vars_all) self.assertDictEqual(expected, result) @@ -107,8 +105,7 @@ def test_duplicate_refs(self): subject_name = "ref_subject" ref_subject = named_variable(subject_name) ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for var in ref_source_vars.values(): setattr(var, self.cf_identity, subject_name) @@ -121,21 +118,16 @@ def test_duplicate_refs(self): ) # ONLY expecting ref_subject, excluding ref_not_subject. - expected = { - subject_name: CFUGridMeshVariable(subject_name, ref_subject) - } + expected = {subject_name: CFUGridMeshVariable(subject_name, ref_subject)} result = CFUGridMeshVariable.identify(vars_all) self.assertDictEqual(expected, result) def test_two_refs(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for ix, var in enumerate(ref_source_vars.values()): setattr(var, self.cf_identity, subject_names[ix]) @@ -183,13 +175,10 @@ def test_string_type_ignored(self): def test_ignore(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") + name: named_variable(name) for name in ("ref_source_1", "ref_source_2") } for ix, var in 
enumerate(ref_source_vars.values()): setattr(var, self.cf_identity, subject_names[ix]) @@ -206,16 +195,12 @@ def test_ignore(self): expected_name, ref_subject_vars[expected_name] ) } - result = CFUGridMeshVariable.identify( - vars_all, ignore=subject_names[1] - ) + result = CFUGridMeshVariable.identify(vars_all, ignore=subject_names[1]) self.assertDictEqual(expected, result) def test_target(self): subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } + ref_subject_vars = {name: named_variable(name) for name in subject_names} source_names = ("ref_source_1", "ref_source_2") ref_source_vars = {name: named_variable(name) for name in source_names} @@ -256,9 +241,7 @@ def operation(warn: bool): # Missing warning. warn_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" - with pytest.warns( - iris.exceptions.IrisCfMissingVarWarning, match=warn_regex - ): + with pytest.warns(iris.exceptions.IrisCfMissingVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -267,12 +250,8 @@ def operation(warn: bool): # String variable warning. 
warn_regex = r".*is a CF-netCDF label variable.*" - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - with pytest.warns( - iris.exceptions.IrisCfLabelVarWarning, match=warn_regex - ): + vars_all[subject_name] = netcdf_ugrid_variable(subject_name, "", np.bytes_) + with pytest.warns(iris.exceptions.IrisCfLabelVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py index 52eb569b43..d2dd32f1e3 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py @@ -58,9 +58,7 @@ def setUpClass(cls): cls.node_y = netcdf_ugrid_variable("node_y", "node", float) cls.face_x = netcdf_ugrid_variable("face_x", "face", float) cls.face_y = netcdf_ugrid_variable("face_y", "face", float) - cls.face_nodes = netcdf_ugrid_variable( - "face_nodes", "face vertex", int - ) + cls.face_nodes = netcdf_ugrid_variable("face_nodes", "face vertex", int) cls.levels = netcdf_ugrid_variable("levels", "levels", int) cls.data = netcdf_ugrid_variable( "data", "levels face", float, coordinates="face_x face_y" @@ -109,9 +107,7 @@ def test_inherited(self): self.assertDictEqual(expected, getattr(self.cf_group, collection)) def test_connectivities(self): - expected_var = CFUGridConnectivityVariable( - "face_nodes", self.face_nodes - ) + expected_var = CFUGridConnectivityVariable("face_nodes", self.face_nodes) expected = {expected_var.cf_name: expected_var} self.assertDictEqual(expected, self.cf_group.connectivities) @@ -121,9 +117,7 @@ def test_mesh(self): self.assertDictEqual(expected, self.cf_group.meshes) def test_ugrid_coords(self): - names = [ - f"{loc}_{ax}" for loc in ("node", "face") for ax in ("x", "y") - ] + names = [f"{loc}_{ax}" for loc in ("node", "face") for ax in ("x", "y")] expected = { 
name: CFUGridAuxiliaryCoordinateVariable(name, getattr(self, name)) for name in names diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py index 8dab48ae9c..010ecddd09 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py @@ -17,9 +17,7 @@ class Tests(tests.IrisTest): # All 'real' tests have been done for load_meshes(). Here we just check # that load_mesh() works with load_meshes() correctly, using mocking. def setUp(self): - self.load_meshes_mock = self.patch( - "iris.experimental.ugrid.load.load_meshes" - ) + self.load_meshes_mock = self.patch("iris.experimental.ugrid.load.load_meshes") # The expected return from load_meshes - a dict of files, each with # a list of meshes. self.load_meshes_mock.return_value = {"file": ["mesh"]} diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index 40d839951b..02b835bbfa 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -15,11 +15,7 @@ import tempfile from uuid import uuid4 -from iris.experimental.ugrid.load import ( - PARSE_UGRID_ON_LOAD, - load_meshes, - logger, -) +from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD, load_meshes, logger from iris.tests.stock.netcdf import ncgen_from_cdl @@ -91,9 +87,7 @@ def add_second_mesh(self): """ vars_string = "variables:" vars_start = self.ref_cdl.index(vars_string) + len(vars_string) - new_cdl = ( - self.ref_cdl[:vars_start] + cdl_extra + self.ref_cdl[vars_start:] - ) + new_cdl = self.ref_cdl[:vars_start] + cdl_extra + self.ref_cdl[vars_start:] return new_cdl, second_name def test_with_data(self): @@ -110,9 +104,7 @@ def test_with_data(self): def test_no_data(self): cdl_lines = self.ref_cdl.split("\n") - cdl_lines = 
filter( - lambda line: ':mesh = "mesh"' not in line, cdl_lines - ) + cdl_lines = filter(lambda line: ':mesh = "mesh"' not in line, cdl_lines) ref_cdl = "\n".join(cdl_lines) nc_path = cdl_to_nc(ref_cdl) @@ -184,9 +176,7 @@ def test_no_parsing(self): _ = load_meshes(nc_path) def test_invalid_scheme(self): - with self.assertRaisesRegex( - ValueError, "Iris cannot handle the URI scheme:.*" - ): + with self.assertRaisesRegex(ValueError, "Iris cannot handle the URI scheme:.*"): with PARSE_UGRID_ON_LOAD.context(): _ = load_meshes("foo://bar") @@ -205,9 +195,7 @@ class TestsHttp(tests.IrisTest): # Tests of HTTP (OpenDAP) loading need mocking since we can't have tests # that rely on 3rd party servers. def setUp(self): - self.format_agent_mock = self.patch( - "iris.fileformats.FORMAT_AGENT.get_spec" - ) + self.format_agent_mock = self.patch("iris.fileformats.FORMAT_AGENT.get_spec") def test_http(self): url = "https://foo" @@ -223,8 +211,6 @@ def test_mixed_sources(self): with PARSE_UGRID_ON_LOAD.context(): _ = load_meshes([url, glob]) - file_uris = [ - call[0][0] for call in self.format_agent_mock.call_args_list - ] + file_uris = [call[0][0] for call in self.format_agent_mock.call_args_list] for source in (url, Path(file).name): self.assertIn(source, file_uris) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 7251597006..b84b32cf41 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -46,19 +46,13 @@ def test_connected(self): self.assertEqual(expected, self.connectivity.connected) def test_start_index(self): - self.assertEqual( - self.kwargs["start_index"], self.connectivity.start_index - ) + self.assertEqual(self.kwargs["start_index"], self.connectivity.start_index) def test_location_axis(self): - self.assertEqual( - self.kwargs["location_axis"], 
self.connectivity.location_axis - ) + self.assertEqual(self.kwargs["location_axis"], self.connectivity.location_axis) def test_indices(self): - self.assertArrayEqual( - self.kwargs["indices"], self.connectivity.indices - ) + self.assertArrayEqual(self.kwargs["indices"], self.connectivity.indices) def test_read_only(self): attributes = ("indices", "cf_role", "start_index", "location_axis") @@ -87,17 +81,13 @@ def test_lazy_indices(self): self.assertTrue(is_lazy_data(self.connectivity.lazy_indices())) def test_core_indices(self): - self.assertArrayEqual( - self.kwargs["indices"], self.connectivity.core_indices() - ) + self.assertArrayEqual(self.kwargs["indices"], self.connectivity.core_indices()) def test_has_lazy_indices(self): self.assertFalse(self.connectivity.has_lazy_indices()) def test_lazy_location_lengths(self): - self.assertTrue( - is_lazy_data(self.connectivity.lazy_location_lengths()) - ) + self.assertTrue(is_lazy_data(self.connectivity.lazy_location_lengths())) def test_location_lengths(self): expected = [4, 4, 4] @@ -126,7 +116,9 @@ def test___str__(self): self.assertEqual(expected, self.connectivity.__str__()) def test___repr__(self): - expected = "" + expected = ( + "" + ) self.assertEqual(expected, self.connectivity.__repr__()) def test_xml_element(self): @@ -141,9 +133,7 @@ def test___eq__(self): equivalent_kwargs["indices"] = self.kwargs["indices"].transpose() equivalent_kwargs["location_axis"] = 1 - self.kwargs["location_axis"] equivalent = Connectivity(**equivalent_kwargs) - self.assertFalse( - np.array_equal(equivalent.indices, self.connectivity.indices) - ) + self.assertFalse(np.array_equal(equivalent.indices, self.connectivity.indices)) self.assertEqual(equivalent, self.connectivity) def test_different(self): @@ -172,9 +162,7 @@ def test_copy(self): def test_indices_by_location(self): expected = self.kwargs["indices"].transpose() - self.assertArrayEqual( - expected, self.connectivity.indices_by_location() - ) + self.assertArrayEqual(expected, 
self.connectivity.indices_by_location()) def test_indices_by_location_input(self): expected = as_lazy_data(self.kwargs["indices"].transpose()) @@ -192,9 +180,7 @@ def setUp(self): self.lazy_indices = as_lazy_data(data) def common(self, indices): - connectivity = Connectivity( - indices=indices, cf_role="face_node_connectivity" - ) + connectivity = Connectivity(indices=indices, cf_role="face_node_connectivity") self.assertArrayEqual(indices, connectivity.indices) def test_int32(self): @@ -247,9 +233,7 @@ def test_cf_role(self): "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), "cf_role": "error", } - self.assertRaisesRegex( - ValueError, "Invalid cf_role .", Connectivity, **kwargs - ) + self.assertRaisesRegex(ValueError, "Invalid cf_role .", Connectivity, **kwargs) def test_indices_int(self): kwargs = { @@ -268,9 +252,7 @@ def test_indices_start_index(self): "indices": np.linspace(-9, -1, 9, dtype=int).reshape((-1, 3)), "cf_role": "face_node_connectivity", } - self.assertRaisesRegex( - ValueError, " < start_index", Connectivity, **kwargs - ) + self.assertRaisesRegex(ValueError, " < start_index", Connectivity, **kwargs) def test_indices_dims_low(self): kwargs = { diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 6784bb6e50..37bd49d346 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -37,12 +37,8 @@ def setUpClass(cls): cls.EDGE_LAT = AuxCoord( [0, 0.5, 0.5], standard_name="latitude", var_name="edge_lat" ) - cls.FACE_LON = AuxCoord( - [0.5], standard_name="longitude", var_name="face_lon" - ) - cls.FACE_LAT = AuxCoord( - [0.5], standard_name="latitude", var_name="face_lat" - ) + cls.FACE_LON = AuxCoord([0.5], standard_name="longitude", var_name="face_lon") + cls.FACE_LAT = AuxCoord([0.5], standard_name="latitude", var_name="face_lat") cls.EDGE_NODE = mesh.Connectivity( [[0, 1], [1, 2], [2, 
0]], @@ -51,16 +47,10 @@ def setUpClass(cls): var_name="var_name", attributes={"test": 1}, ) - cls.FACE_NODE = mesh.Connectivity( - [[0, 1, 2]], cf_role="face_node_connectivity" - ) - cls.FACE_EDGE = mesh.Connectivity( - [[0, 1, 2]], cf_role="face_edge_connectivity" - ) + cls.FACE_NODE = mesh.Connectivity([[0, 1, 2]], cf_role="face_node_connectivity") + cls.FACE_EDGE = mesh.Connectivity([[0, 1, 2]], cf_role="face_edge_connectivity") # (Actually meaningless:) - cls.FACE_FACE = mesh.Connectivity( - [[0, 0, 0]], cf_role="face_face_connectivity" - ) + cls.FACE_FACE = mesh.Connectivity([[0, 0, 0]], cf_role="face_face_connectivity") # (Actually meaningless:) cls.EDGE_FACE = mesh.Connectivity( [[0, 0], [0, 0], [0, 0]], cf_role="edge_face_connectivity" @@ -191,9 +181,7 @@ def test_connectivities(self): {"cf_role": "edge_node_connectivity"}, ) - fake_connectivity = tests.mock.Mock( - __class__=mesh.Connectivity, cf_role="fake" - ) + fake_connectivity = tests.mock.Mock(__class__=mesh.Connectivity, cf_role="fake") negative_kwargs = ( {"item": fake_connectivity}, {"item": "foo"}, @@ -238,9 +226,7 @@ def test_coord(self): # See Mesh.coords tests for thorough coverage of cases. 
func = self.mesh.coord exception = CoordinateNotFoundError - self.assertRaisesRegex( - exception, ".*but found 2", func, include_nodes=True - ) + self.assertRaisesRegex(exception, ".*but found 2", func, include_nodes=True) self.assertRaisesRegex(exception, ".*but found none", func, axis="t") def test_coords(self): @@ -307,9 +293,7 @@ def test_coords_elements(self): self.assertEqual([], func(include_faces=True)) def test_edge_dimension(self): - self.assertEqual( - self.kwargs["edge_dimension"], self.mesh.edge_dimension - ) + self.assertEqual(self.kwargs["edge_dimension"], self.mesh.edge_dimension) def test_edge_coords(self): expected = mesh.MeshEdgeCoords(self.EDGE_LON, self.EDGE_LAT) @@ -346,9 +330,7 @@ def test_node_coords(self): self.assertEqual(expected, self.mesh.node_coords) def test_node_dimension(self): - self.assertEqual( - self.kwargs["node_dimension"], self.mesh.node_dimension - ) + self.assertEqual(self.kwargs["node_dimension"], self.mesh.node_dimension) def test_topology_dimension(self): self.assertEqual( @@ -503,18 +485,14 @@ def test_all_coords(self): self.assertEqual(expected, self.mesh.all_coords) def test_boundary_node(self): - self.assertEqual( - self.BOUNDARY_NODE, self.mesh.boundary_node_connectivity - ) + self.assertEqual(self.BOUNDARY_NODE, self.mesh.boundary_node_connectivity) def test_connectivity(self): # See Mesh.connectivities tests for thorough coverage of cases. # Can only test Mesh.connectivity for 2D since we need >1 connectivity. 
func = self.mesh.connectivity exception = ConnectivityNotFoundError - self.assertRaisesRegex( - exception, ".*but found 3", func, contains_node=True - ) + self.assertRaisesRegex(exception, ".*but found 3", func, contains_node=True) self.assertRaisesRegex( exception, ".*but found none", @@ -634,9 +612,7 @@ def test_face_coords(self): self.assertEqual(expected, self.mesh.face_coords) def test_face_dimension(self): - self.assertEqual( - self.kwargs["face_dimension"], self.mesh.face_dimension - ) + self.assertEqual(self.kwargs["face_dimension"], self.mesh.face_dimension) def test_face_edge(self): self.assertEqual(self.FACE_EDGE, self.mesh.face_edge_connectivity) @@ -778,9 +754,7 @@ def test___setstate__(self): self.assertEqual(false_metadata_manager, self.mesh._metadata_manager) self.assertEqual(false_coord_manager, self.mesh._coord_manager) - self.assertEqual( - false_connectivity_manager, self.mesh._connectivity_manager - ) + self.assertEqual(false_connectivity_manager, self.mesh._connectivity_manager) def test_add_connectivities(self): # Cannot test ADD - 1D - nothing extra to add beyond minimum. @@ -914,9 +888,7 @@ def test_add_coords_single(self): ) def test_add_coords_single_face(self): - self.assertRaises( - TypeError, self.mesh.add_coords, face_x=self.FACE_LON - ) + self.assertRaises(TypeError, self.mesh.add_coords, face_x=self.FACE_LON) def test_dimension_names(self): # Test defaults. 
@@ -972,9 +944,7 @@ def test_remove_connectivities(self): {"contains_edge": True, "contains_node": True}, ) - fake_connectivity = tests.mock.Mock( - __class__=mesh.Connectivity, cf_role="fake" - ) + fake_connectivity = tests.mock.Mock(__class__=mesh.Connectivity, cf_role="fake") negative_kwargs = ( {"item": fake_connectivity}, {"item": "foo"}, @@ -1074,9 +1044,7 @@ def test_to_MeshCoords(self): def test_to_MeshCoords_face(self): location = "face" - self.assertRaises( - CoordinateNotFoundError, self.mesh.to_MeshCoords, location - ) + self.assertRaises(CoordinateNotFoundError, self.mesh.to_MeshCoords, location) class TestOperations2D(TestOperations1D): @@ -1108,9 +1076,7 @@ def test_add_connectivities(self): for new_len in (False, True): # First replace with ones of same length, then with ones of # different length. - kwargs = { - k: self.new_connectivity(v, new_len) for k, v in kwargs.items() - } + kwargs = {k: self.new_connectivity(v, new_len) for k, v in kwargs.items()} self.mesh.add_connectivities(*kwargs.values()) self.assertEqual( mesh.Mesh2DConnectivities(**kwargs), @@ -1141,9 +1107,7 @@ def test_add_connectivities_inconsistent(self): ) def test_add_connectivities_invalid(self): - fake_cf_role = tests.mock.Mock( - __class__=mesh.Connectivity, cf_role="foo" - ) + fake_cf_role = tests.mock.Mock(__class__=mesh.Connectivity, cf_role="foo") log_regex = r"Not adding connectivity.*" with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): self.mesh.add_connectivities(fake_cf_role) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index a023762d10..2bd8e5ddc4 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -137,9 +137,7 @@ class Test__points_and_bounds(tests.IrisTest): # See Test_MeshCoord__dataviews for more detailed checks. 
def test_node(self): meshcoord = sample_meshcoord(location="node") - n_nodes = ( - iris.tests.stock.mesh._TEST_N_NODES - ) # n-nodes default for sample mesh + n_nodes = iris.tests.stock.mesh._TEST_N_NODES # n-nodes default for sample mesh self.assertIsNone(meshcoord.core_bounds()) self.assertArrayAllClose(meshcoord.points, 1100 + np.arange(n_nodes)) @@ -148,9 +146,7 @@ def test_edge(self): points, bounds = meshcoord.core_points(), meshcoord.core_bounds() self.assertEqual(points.shape, meshcoord.shape) self.assertEqual(bounds.shape, meshcoord.shape + (2,)) - self.assertArrayAllClose( - meshcoord.points, [2100, 2101, 2102, 2103, 2104] - ) + self.assertArrayAllClose(meshcoord.points, [2100, 2101, 2102, 2103, 2104]) self.assertArrayAllClose( meshcoord.bounds, [ @@ -780,9 +776,7 @@ def _check_bounds_bad_index_values(self, lazy): # Indicate how many "extra" missing results this should cause. "n_extra_bad_points": 2, } - self._make_test_meshcoord( - facenodes_changes=facenodes_modify, lazy_sources=lazy - ) + self._make_test_meshcoord(facenodes_changes=facenodes_modify, lazy_sources=lazy) self._check_expected_bounds_values() def test_bounds_badvalues__real(self): @@ -837,22 +831,16 @@ def axis_x_or_y(self, request): def test_node_meshcoord(self, axis_x_or_y): # MeshCoord metadata matches that of the relevant node coord. self.setup_mesh(location="node", axis=axis_x_or_y) - meshcoord = self.mesh.to_MeshCoord( - location=self.location, axis=self.axis - ) + meshcoord = self.mesh.to_MeshCoord(location=self.location, axis=self.axis) self.coord_metadata_matches(meshcoord, self.node_coord) def test_faceedge_basic(self, location_face_or_edge, axis_x_or_y): # MeshCoord metadata matches that of the face/edge ("points") coord. 
self.setup_mesh(location_face_or_edge, axis_x_or_y) - meshcoord = self.mesh.to_MeshCoord( - location=self.location, axis=self.axis - ) + meshcoord = self.mesh.to_MeshCoord(location=self.location, axis=self.axis) self.coord_metadata_matches(meshcoord, self.location_coord) - @pytest.mark.parametrize( - "fieldname", ["long_name", "var_name", "attributes"] - ) + @pytest.mark.parametrize("fieldname", ["long_name", "var_name", "attributes"]) def test_faceedge_dontcare_fields( self, location_face_or_edge, axis_x_or_y, fieldname ): @@ -867,9 +855,7 @@ def test_faceedge_dontcare_fields( setattr(self.location_coord, fieldname, different_value) # Mostly.. just check this does not cause an error, as it would do if we # modified "standard_name" or "units" (see other tests) ... - meshcoord = self.mesh.to_MeshCoord( - location=self.location, axis=self.axis - ) + meshcoord = self.mesh.to_MeshCoord(location=self.location, axis=self.axis) # ... but also, check that the result matches the expected face/edge coord. self.coord_metadata_matches(meshcoord, self.location_coord) @@ -888,13 +874,9 @@ def test_faceedge_fail_mismatched_stdnames( f"'{node_name}' instead of '{location_name}'" ) with pytest.raises(ValueError, match=msg): - self.mesh.to_MeshCoord( - location=location_face_or_edge, axis=axis_x_or_y - ) + self.mesh.to_MeshCoord(location=location_face_or_edge, axis=axis_x_or_y) - def test_faceedge_fail_missing_stdnames( - self, location_face_or_edge, axis_x_or_y - ): + def test_faceedge_fail_missing_stdnames(self, location_face_or_edge, axis_x_or_y): # "standard_name" compared with None also causes an error. 
self.setup_mesh(location_face_or_edge, axis_x_or_y) self.node_coord.standard_name = None @@ -911,13 +893,9 @@ def test_faceedge_fail_missing_stdnames( f"None instead of '{location_name}'" ) with pytest.raises(ValueError, match=msg): - self.mesh.to_MeshCoord( - location=location_face_or_edge, axis=axis_x_or_y - ) + self.mesh.to_MeshCoord(location=location_face_or_edge, axis=axis_x_or_y) - def test_faceedge_fail_mismatched_units( - self, location_face_or_edge, axis_x_or_y - ): + def test_faceedge_fail_mismatched_units(self, location_face_or_edge, axis_x_or_y): # Different "units" for node and face/edge causes an error. self.setup_mesh(location_face_or_edge, axis_x_or_y) self.node_coord.units = "hPa" @@ -928,18 +906,14 @@ def test_faceedge_fail_mismatched_units( "'hPa' instead of 'degrees'" ) with pytest.raises(ValueError, match=msg): - self.mesh.to_MeshCoord( - location=location_face_or_edge, axis=axis_x_or_y - ) + self.mesh.to_MeshCoord(location=location_face_or_edge, axis=axis_x_or_y) def test_faceedge_missing_units(self, location_face_or_edge, axis_x_or_y): # Units compared with a None ("unknown") is not an error. self.setup_mesh(location_face_or_edge, axis_x_or_y) self.node_coord.units = None # This is OK - meshcoord = self.mesh.to_MeshCoord( - location=self.location, axis=self.axis - ) + meshcoord = self.mesh.to_MeshCoord(location=self.location, axis=self.axis) # ... but also, check that the result matches the expected face/edge coord. 
self.coord_metadata_matches(meshcoord, self.location_coord) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index 2581bf106a..d374a98144 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -47,12 +47,8 @@ def test_dimensionality(self): mesh = self.create() self.assertEqual(1, mesh.topology_dimension) - self.assertArrayEqual( - [0, 1, 1, 2, 2, 3], mesh.node_coords.node_x.points - ) - self.assertArrayEqual( - [0, 1, 2, 3, 1, 2], mesh.node_coords.node_y.points - ) + self.assertArrayEqual([0, 1, 1, 2, 2, 3], mesh.node_coords.node_x.points) + self.assertArrayEqual([0, 1, 2, 3, 1, 2], mesh.node_coords.node_y.points) self.assertArrayEqual([0.5, 1.5, 2.5], mesh.edge_coords.edge_x.points) self.assertArrayEqual([0.5, 2.5, 1.5], mesh.edge_coords.edge_y.points) self.assertIsNone(getattr(mesh, "face_coords", None)) @@ -104,12 +100,8 @@ def test_mesh_metadata(self): def test_lazy(self): self.lon = AuxCoord.from_coord(self.lon) - self.lon = self.lon.copy( - self.lon.lazy_points(), self.lon.lazy_bounds() - ) - self.lat = self.lat.copy( - self.lat.lazy_points(), self.lat.lazy_bounds() - ) + self.lon = self.lon.copy(self.lon.lazy_points(), self.lon.lazy_bounds()) + self.lat = self.lat.copy(self.lat.lazy_points(), self.lat.lazy_bounds()) mesh = self.create() for coord in list(mesh.all_coords): @@ -124,17 +116,11 @@ def test_coord_shape_mismatch(self): self.lat = lat_orig.copy( points=lat_orig.points, bounds=np.tile(lat_orig.bounds, 2) ) - with self.assertRaisesRegex( - ValueError, "bounds shapes are not identical" - ): + with self.assertRaisesRegex(ValueError, "bounds shapes are not identical"): _ = self.create() - self.lat = lat_orig.copy( - points=lat_orig.points[-1], bounds=lat_orig.bounds[-1] - ) - with self.assertRaisesRegex( - ValueError, "points shapes are not 
identical" - ): + self.lat = lat_orig.copy(points=lat_orig.points[-1], bounds=lat_orig.bounds[-1]) + with self.assertRaisesRegex(ValueError, "points shapes are not identical"): _ = self.create() def test_reorder(self): @@ -148,9 +134,7 @@ def test_reorder(self): def test_non_xy(self): for coord in self.lon, self.lat: coord.standard_name = None - lon_name, lat_name = [ - coord.long_name for coord in (self.lon, self.lat) - ] + lon_name, lat_name = [coord.long_name for coord in (self.lon, self.lat)] # Swap the coords. self.lat, self.lon = self.lon, self.lat with self.assertLogs(logger, "INFO", "Unable to find 'X' and 'Y'"): @@ -187,9 +171,7 @@ def test_dimensionality(self): for conn_name in Connectivity.UGRID_CF_ROLES: conn = getattr(mesh, conn_name, None) if conn_name == "face_node_connectivity": - self.assertArrayEqual( - [[0, 1, 2], [3, 4, 5], [6, 7, 8]], conn.indices - ) + self.assertArrayEqual([[0, 1, 2], [3, 4, 5], [6, 7, 8]], conn.indices) else: self.assertIsNone(conn) @@ -215,9 +197,7 @@ def test_mixed_shapes(self): self.lat.bounds = np.ma.masked_equal(lat_bounds, 999) mesh = self.create() - self.assertArrayEqual( - mesh.face_node_connectivity.location_lengths(), [4, 4, 3] - ) + self.assertArrayEqual(mesh.face_node_connectivity.location_lengths(), [4, 4, 3]) self.assertEqual(mesh.node_coords.node_x.points[-1], 0.0) self.assertEqual(mesh.node_coords.node_y.points[-1], 0.0) @@ -246,7 +226,5 @@ class TestInvalidPoints(tests.IrisTest): def test_2d_coord(self): cube = simple_2d_w_multidim_coords()[:3, :3] coord_1, coord_2 = cube.coords() - with self.assertRaisesRegex( - ValueError, "Expected coordinate ndim == 1" - ): + with self.assertRaisesRegex(ValueError, "Expected coordinate ndim == 1"): _ = Mesh.from_coords(coord_1, coord_2) diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index 3b8e5ded9f..57218cd299 100644 --- 
a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -145,9 +145,7 @@ def test_op_lenient_same_members_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -178,9 +176,7 @@ def test_op_lenient_different_members(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -219,9 +215,7 @@ def test_op_strict_different_members(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -252,9 +246,7 @@ def test_op_strict_different_members_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -317,9 +309,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) @@ -384,15 +374,9 @@ def 
test_op_lenient_same_members_none(self): rmetadata = self.cls(**right) expected = right.copy() - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_lenient_different(self): lmetadata = self.cls(**self.values) @@ -415,15 +399,9 @@ def test_op_lenient_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -455,15 +433,9 @@ def test_op_strict_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_different_none(self): lmetadata = self.cls(**self.values) @@ -486,15 +458,9 @@ def test_op_strict_different_members_none(self): expected = self.values.copy() expected[member] = None - with 
mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) class Test_difference(tests.IrisTest): @@ -514,9 +480,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) @@ -583,15 +547,9 @@ def test_op_lenient_same_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = (None, member_value) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different(self): left = self.values.copy() @@ -605,12 +563,8 @@ def test_op_lenient_different(self): rexpected["units"] = lexpected["units"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def 
test_op_lenient_different_members(self): for member in self.cls._members: @@ -624,15 +578,9 @@ def test_op_lenient_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -654,12 +602,8 @@ def test_op_strict_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members(self): for member in self.cls._members: @@ -673,15 +617,9 @@ def test_op_strict_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_none(self): left = self.values.copy() @@ -695,12 +633,8 @@ 
def test_op_strict_different_none(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members_none(self): for member in self.cls._members: @@ -714,15 +648,9 @@ def test_op_strict_different_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) class Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py index 0786c52934..5a4befa9ce 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py @@ -135,9 +135,7 @@ def test_op_lenient_same_members_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -158,9 +156,7 @@ def test_op_lenient_different_members(self): right[member] = 
self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -189,9 +185,7 @@ def test_op_strict_different_members(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -212,9 +206,7 @@ def test_op_strict_different_members_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertFalse(lmetadata.__eq__(rmetadata)) self.assertFalse(rmetadata.__eq__(lmetadata)) @@ -276,9 +268,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) @@ -343,15 +333,9 @@ def test_op_lenient_same_members_none(self): rmetadata = self.cls(**right) expected = right.copy() - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_lenient_different(self): lmetadata = self.cls(**self.values) @@ 
-374,15 +358,9 @@ def test_op_lenient_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -414,15 +392,9 @@ def test_op_strict_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_different_none(self): lmetadata = self.cls(**self.values) @@ -445,15 +417,9 @@ def test_op_strict_different_members_none(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) class Test_difference(tests.IrisTest): @@ -472,9 +438,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( 
- BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) @@ -541,15 +505,9 @@ def test_op_lenient_same_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = (None, member_value) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different(self): left = self.values.copy() @@ -563,12 +521,8 @@ def test_op_lenient_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different_members(self): for member in self.cls._members: @@ -582,15 +536,9 @@ def test_op_lenient_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + 
self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -612,12 +560,8 @@ def test_op_strict_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members(self): for member in self.cls._members: @@ -631,15 +575,9 @@ def test_op_strict_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_none(self): left = self.values.copy() @@ -653,12 +591,8 @@ def test_op_strict_different_none(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members_none(self): for member in self.cls._members: @@ -672,15 +606,9 @@ def 
test_op_strict_different_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) class Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py index ba7199b777..bc7e3e70db 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py @@ -163,9 +163,7 @@ def test_op_lenient_same_dim_names_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertTrue(lmetadata.__eq__(rmetadata)) self.assertTrue(rmetadata.__eq__(lmetadata)) @@ -196,9 +194,7 @@ def test_op_lenient_different_dim_names(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=True): self.assertTrue(lmetadata.__eq__(rmetadata)) self.assertTrue(rmetadata.__eq__(lmetadata)) @@ -237,9 +233,7 @@ def test_op_strict_different_dim_names(self): right[member] = self.dummy rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertTrue(lmetadata.__eq__(rmetadata)) 
self.assertTrue(rmetadata.__eq__(lmetadata)) @@ -270,9 +264,7 @@ def test_op_strict_different_dim_names_none(self): right[member] = None rmetadata = self.cls(**right) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): + with mock.patch("iris.common.metadata._LENIENT", return_value=False): self.assertTrue(lmetadata.__eq__(rmetadata)) self.assertTrue(rmetadata.__eq__(lmetadata)) @@ -326,9 +318,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) + self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) @@ -393,15 +383,9 @@ def test_op_lenient_same_members_none(self): rmetadata = self.cls(**right) expected = right.copy() - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_lenient_different(self): lmetadata = self.cls(**self.values) @@ -424,15 +408,9 @@ def test_op_lenient_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = 
self.cls(**self.values) @@ -464,15 +442,9 @@ def test_op_strict_different_members(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_strict_different_none(self): lmetadata = self.cls(**self.values) @@ -495,15 +467,9 @@ def test_op_strict_different_members_none(self): expected = self.values.copy() expected[member] = None - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) class Test_difference(tests.IrisTest): @@ -524,9 +490,7 @@ def setUp(self): self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) + self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) @@ -593,15 +557,9 @@ def test_op_lenient_same_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = (None, member_value) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, 
rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different(self): left = self.values.copy() @@ -615,12 +573,8 @@ def test_op_lenient_different(self): rexpected["units"] = lexpected["units"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_lenient_different_members(self): for member in self.cls._members: @@ -634,15 +588,9 @@ def test_op_lenient_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_same(self): lmetadata = self.cls(**self.values) @@ -664,12 +612,8 @@ def test_op_strict_different(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, 
rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members(self): for member in self.cls._members: @@ -683,15 +627,9 @@ def test_op_strict_different_members(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_none(self): left = self.values.copy() @@ -705,12 +643,8 @@ def test_op_strict_different_none(self): rexpected["long_name"] = lexpected["long_name"][::-1] with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) def test_op_strict_different_members_none(self): for member in self.cls._members: @@ -724,15 +658,9 @@ def test_op_strict_different_members_none(self): rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) + self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) class 
Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py index a3cd91815f..b42eed9f34 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py @@ -47,8 +47,7 @@ def common_test_setup(self, shape_3d=(0, 2), data_chunks=None): region_len = n_mesh // n_regions i_points = np.arange(n_mesh) region_inds = [ - np.where((i_points // region_len) == i_region) - for i_region in range(n_regions) + np.where((i_points // region_len) == i_region) for i_region in range(n_regions) ] # Disturb slightly to ensure some gaps + some overlaps region_inds = [list(indarr[0]) for indarr in region_inds] @@ -135,9 +134,7 @@ def test_missing_points(self): result_all = recombine_submeshes(self.mesh_cube, self.region_cubes) self.assertTrue(np.all(~result_all[..., inds].data.mask)) # Without region1, all points in reg1 are masked - regions_not2 = [ - cube for cube in self.region_cubes if cube is not region2 - ] + regions_not2 = [cube for cube in self.region_cubes if cube is not region2] result_not2 = recombine_submeshes(self.mesh_cube, regions_not2) self.assertTrue(np.all(result_not2[..., inds].data.mask)) @@ -150,16 +147,12 @@ def test_transposed(self): result = recombine_submeshes(self.mesh_cube, self.region_cubes) self.assertTrue(result.has_lazy_data()) self.assertEqual(result.mesh_dim(), 0) - self.assertMaskedArrayEqual( - result.data.transpose(), self.expected_result - ) + self.assertMaskedArrayEqual(result.data.transpose(), self.expected_result) def test_dtype(self): # Check that result dtype comes from submeshes, not mesh_cube. 
self.assertEqual(self.mesh_cube.dtype, np.float64) - self.assertTrue( - all(cube.dtype == np.float64 for cube in self.region_cubes) - ) + self.assertTrue(all(cube.dtype == np.float64 for cube in self.region_cubes)) result = recombine_submeshes(self.mesh_cube, self.region_cubes) self.assertEqual(result.dtype, np.float64) region_cubes2 = [ @@ -278,9 +271,7 @@ def test_single_region(self): self.assertEqual(result1, result2) def test_fail_no_regions(self): - with self.assertRaisesRegex( - ValueError, "'submesh_cubes' must be non-empty" - ): + with self.assertRaisesRegex(ValueError, "'submesh_cubes' must be non-empty"): recombine_submeshes(self.mesh_cube, []) def test_fail_dims_mismatch_mesh_regions(self): @@ -347,10 +338,7 @@ def test_fail_dtype_mismatch_region_regions(self): def test_fail_dimcoord_sub_no_mesh(self): self.mesh_cube.remove_coord("level") - msg = ( - 'has a dim-coord "level" for dimension 0, ' - "but 'mesh_cube' has none." - ) + msg = 'has a dim-coord "level" for dimension 0, ' "but 'mesh_cube' has none." with self.assertRaisesRegex(ValueError, msg): recombine_submeshes(self.mesh_cube, self.region_cubes) diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index 4c0bca25c1..4b76ac5d48 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -57,9 +57,7 @@ def sorted_by_coordname(list): return sorted(list, key=lambda item: item[0].name()) coords_and_dims_got = sorted_by_coordname(coords_and_dims_got) - coords_and_dims_expected = sorted_by_coordname( - coords_and_dims_expected - ) + coords_and_dims_expected = sorted_by_coordname(coords_and_dims_expected) self.assertEqual(coords_and_dims_got, coords_and_dims_expected) # Also check coordinate type equivalences (as Coord.__eq__ does not). 
self.assertEqual( diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py index 48f383d7f7..e1b4b7a7cd 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py @@ -25,14 +25,10 @@ def setUp(self): def test_non_data_names(self): data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") - aux_var = MagicMock( - spec=CFAuxiliaryCoordinateVariable, cf_name="aux_var" - ) + aux_var = MagicMock(spec=CFAuxiliaryCoordinateVariable, cf_name="aux_var") coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") coord_var2 = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var2") - duplicate_name_var = MagicMock( - spec=CFCoordinateVariable, cf_name="aux_var" - ) + duplicate_name_var = MagicMock(spec=CFCoordinateVariable, cf_name="aux_var") for var in ( data_var, @@ -43,8 +39,6 @@ def test_non_data_names(self): ): self.cf_group[var.cf_name] = var - expected_names = [ - var.cf_name for var in (aux_var, coord_var, coord_var2) - ] + expected_names = [var.cf_name for var in (aux_var, coord_var, coord_var2)] expected = set(expected_names) self.assertEqual(expected, self.cf_group.non_data_variable_names) diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 4829d03dbb..ae6a87de3f 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -74,25 +74,15 @@ def test_create_global_attributes(self): return_value=self.dataset, ): global_attrs = CFReader("dummy").cf_group.global_attributes - self.assertEqual( - global_attrs["dimensions"], "something something_else" - ) + self.assertEqual(global_attrs["dimensions"], "something something_else") class Test_translate__formula_terms(tests.IrisTest): def setUp(self): - self.delta = netcdf_variable( - "delta", "height", np.float64, bounds="delta_bnds" - ) - 
self.delta_bnds = netcdf_variable( - "delta_bnds", "height bnds", np.float64 - ) - self.sigma = netcdf_variable( - "sigma", "height", np.float64, bounds="sigma_bnds" - ) - self.sigma_bnds = netcdf_variable( - "sigma_bnds", "height bnds", np.float64 - ) + self.delta = netcdf_variable("delta", "height", np.float64, bounds="delta_bnds") + self.delta_bnds = netcdf_variable("delta_bnds", "height bnds", np.float64) + self.sigma = netcdf_variable("sigma", "height", np.float64, bounds="sigma_bnds") + self.sigma_bnds = netcdf_variable("sigma_bnds", "height bnds", np.float64) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" standard_name = "atmosphere_hybrid_height_coordinate" @@ -137,9 +127,7 @@ def setUp(self): file_format="NetCDF4", variables=self.variables, ncattrs=ncattrs ) # Restrict the CFReader functionality to only performing translations. - build_patch = mock.patch( - "iris.fileformats.cf.CFReader._build_cf_groups" - ) + build_patch = mock.patch("iris.fileformats.cf.CFReader._build_cf_groups") reset_patch = mock.patch("iris.fileformats.cf.CFReader._reset") build_patch.start() reset_patch.start() @@ -186,18 +174,10 @@ def test_create_formula_terms(self): class Test_build_cf_groups__formula_terms(tests.IrisTest): def setUp(self): - self.delta = netcdf_variable( - "delta", "height", np.float64, bounds="delta_bnds" - ) - self.delta_bnds = netcdf_variable( - "delta_bnds", "height bnds", np.float64 - ) - self.sigma = netcdf_variable( - "sigma", "height", np.float64, bounds="sigma_bnds" - ) - self.sigma_bnds = netcdf_variable( - "sigma_bnds", "height bnds", np.float64 - ) + self.delta = netcdf_variable("delta", "height", np.float64, bounds="delta_bnds") + self.delta_bnds = netcdf_variable("delta_bnds", "height bnds", np.float64) + self.sigma = netcdf_variable("sigma", "height", np.float64, bounds="sigma_bnds") + self.sigma_bnds = netcdf_variable("sigma_bnds", "height bnds", np.float64) self.orography 
= netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" standard_name = "atmosphere_hybrid_height_coordinate" @@ -283,9 +263,7 @@ def test_associate_formula_terms_with_data_variable(self): self.assertIs(group[name].cf_data, getattr(self, name)) # Check all the auxiliary coordinates are formula terms. formula_terms = cf_group.formula_terms - self.assertTrue( - set(formula_terms.items()).issubset(list(group.items())) - ) + self.assertTrue(set(formula_terms.items()).issubset(list(group.items()))) # Check the terms by root. for name, term in zip(aux_coordinates, ["a", "b", "orog"]): self.assertEqual( diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py index a29eb625d0..ce4d6d6217 100644 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py @@ -24,9 +24,7 @@ def setUp(self): # Also patch the private path variable to the existing value (i.e. no # change), and restore it after each test: As these tests modify it, # that can potentially break subsequent 'normal' behaviour. 
- self.patch( - "iris.fileformats.dot._DOT_EXECUTABLE_PATH", _DOT_EXECUTABLE_PATH - ) + self.patch("iris.fileformats.dot._DOT_EXECUTABLE_PATH", _DOT_EXECUTABLE_PATH) def test_valid_absolute_path(self): # Override the configuration value for System.dot_path diff --git a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py index 98bc42ddf3..2a09a60275 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py @@ -32,9 +32,7 @@ def test_none(self): self._test(row=None, yp=None, yv=None) def test_1d(self): - self._test( - row=np.array([[0], [1], [2], [3]]), yp=np.array([0, 1, 2]), yv=None - ) + self._test(row=np.array([[0], [1], [2], [3]]), yp=np.array([0, 1, 2]), yv=None) def test_2d(self): self._test( diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index 15bb61e230..c06d8db85a 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -30,11 +30,11 @@ _DummyField = collections.namedtuple( - "_DummyField", "lbext lblrec lbnrec raw_lbpack " "lbuser boundary_packing" + "_DummyField", "lbext lblrec lbnrec raw_lbpack lbuser boundary_packing" ) _DummyFieldWithSize = collections.namedtuple( "_DummyFieldWithSize", - "lbext lblrec lbnrec raw_lbpack " "lbuser boundary_packing " "lbnpt lbrow", + "lbext lblrec lbnrec raw_lbpack lbuser boundary_packing lbnpt lbrow", ) _DummyBoundaryPacking = collections.namedtuple( "_DummyBoundaryPacking", "x_halo y_halo rim_width" @@ -228,9 +228,7 @@ def setUp(self): field.lbnrec = 50 field.boundary_packing = None - def _test( - self, mock_field, expected_depth, expected_dtype, word_depth=None - ): + def _test(self, mock_field, expected_depth, expected_dtype, word_depth=None): with mock.patch("iris.fileformats._ff.FFHeader", return_value=None): kwargs = {} if word_depth is not None: @@ -399,9 +397,7 @@ def 
test_lbpack_unsupported(self): self._test(mock_field, None, None) def test_lbc_unpacked(self): - boundary_packing = _DummyBoundaryPacking( - x_halo=11, y_halo=7, rim_width=3 - ) + boundary_packing = _DummyBoundaryPacking(x_halo=11, y_halo=7, rim_width=3) mock_field = _DummyFieldWithSize( lbext=10, lblrec=200, @@ -424,15 +420,11 @@ def test_lbc_wgdos_unsupported(self): # Anything not None will do here. boundary_packing=0, ) - with self.assertRaisesRegex( - ValueError, "packed LBC data is not supported" - ): + with self.assertRaisesRegex(ValueError, "packed LBC data is not supported"): self._test(mock_field, None, None) def test_lbc_cray(self): - boundary_packing = _DummyBoundaryPacking( - x_halo=11, y_halo=7, rim_width=3 - ) + boundary_packing = _DummyBoundaryPacking(x_halo=11, y_halo=7, rim_width=3) mock_field = _DummyFieldWithSize( lbext=10, lblrec=200, @@ -526,17 +518,13 @@ def test__basic(self): def test__bad_lbtim(self): self.mock_field.lbtim = 717 ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex( - ValueError, "LBTIM of 717, expected only 0 or 11" - ): + with self.assertRaisesRegex(ValueError, "LBTIM of 717, expected only 0 or 11"): ff2pp._adjust_field_for_lbc(self.mock_field) def test__bad_lbvc(self): self.mock_field.lbvc = 312 ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex( - ValueError, "LBVC of 312, expected only 0 or 65" - ): + with self.assertRaisesRegex(ValueError, "LBVC of 312, expected only 0 or 65"): ff2pp._adjust_field_for_lbc(self.mock_field) @@ -548,9 +536,7 @@ def setUp(self): # Fake the level constants to look like 3 model levels. 
self.n_all_levels = 3 - self.mock_ff_header.level_dependent_constants = np.zeros( - (self.n_all_levels) - ) + self.mock_ff_header.level_dependent_constants = np.zeros((self.n_all_levels)) self.mock_ff = self.patch( "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header ) @@ -568,9 +554,7 @@ def _check_expected_levels(self, results, n_levels): self.assertEqual(results[0].lblev, self.original_lblev) else: self.assertEqual(len(results), n_levels) - self.assertEqual( - [fld.lblev for fld in results], list(range(n_levels)) - ) + self.assertEqual([fld.lblev for fld in results], list(range(n_levels))) def test__is_lbc(self): ff2pp = FF2PP("dummy_filename") diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py index cbbc81dd4b..1c20acd39d 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py @@ -20,9 +20,7 @@ class Test_grid(tests.IrisTest): def _header(self, grid_staggering): - with mock.patch.object( - FFHeader, "__init__", mock.Mock(return_value=None) - ): + with mock.patch.object(FFHeader, "__init__", mock.Mock(return_value=None)): header = FFHeader() header.grid_staggering = grid_staggering header.column_dependent_constants = mock.sentinel.column @@ -33,9 +31,7 @@ def _header(self, grid_staggering): def _test_grid_staggering(self, grid_staggering): header = self._header(grid_staggering) - with mock.patch.dict( - FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid} - ): + with mock.patch.dict(FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid}): grid = header.grid() self.assertIsInstance(grid, MyGrid) self.assertIs(grid.column, mock.sentinel.column) diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py index e71a31f10f..33f18d5a7a 100644 --- 
a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py @@ -11,10 +11,7 @@ # importing anything else. import iris.tests as tests # isort:skip -from iris.fileformats.name_loaders import ( - NAMECoord, - _build_lat_lon_for_NAME_timeseries, -) +from iris.fileformats.name_loaders import NAMECoord, _build_lat_lon_for_NAME_timeseries class TestCellMethods(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py index 2eea25a26d..ffaf6957ce 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py @@ -35,9 +35,7 @@ def test_cell_methods(self): self.patch("iris.fileformats.name_loaders._cf_height_from_name") self.patch("iris.cube.Cube") cubes = list( - _generate_cubes( - header, column_headings, coords, data_arrays, cell_methods - ) + _generate_cubes(header, column_headings, coords, data_arrays, cell_methods) ) cubes[0].assert_has_calls([mock.call.add_cell_method("cell_method_1")]) @@ -61,9 +59,7 @@ def _simulate_with_coords(self, names, values, dimensions): self.patch("iris.fileformats.name_loaders._cf_height_from_name") self.patch("iris.cube.Cube") - cubes = list( - _generate_cubes(header, column_headings, coords, data_arrays) - ) + cubes = list(_generate_cubes(header, column_headings, coords, data_arrays)) return cubes def test_non_circular(self): @@ -120,9 +116,7 @@ def _simulate_with_coords(self, names, values, dimensions): self.patch("iris.fileformats.name_loaders._cf_height_from_name") self.patch("iris.cube.Cube") - cubes = list( - _generate_cubes(header, column_headings, coords, data_arrays) - ) + cubes = list(_generate_cubes(header, column_headings, coords, data_arrays)) return cubes def test_time_dim(self): diff --git 
a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 906ba33f9c..455cb4f003 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -13,9 +13,7 @@ import iris.coord_systems as ics import iris.fileformats._nc_load_rules.helpers as hh -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) +from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions class Mixin__grid_mapping(Mixin__nc_load_actions): @@ -280,9 +278,7 @@ def check_result( else: expected_dim_coords += x_coords - self.assertEqual( - set(expected_dim_coords), set(cube.coords(dim_coords=True)) - ) + self.assertEqual(set(expected_dim_coords), set(cube.coords(dim_coords=True))) if cube_no_xycoords: self.assertEqual(expected_dim_coords, []) x_coord = None @@ -293,9 +289,7 @@ def check_result( self.assertEqual(len(y_coords), 1) (y_coord,) = y_coords - self.assertEqual( - set(expected_aux_coords), set(cube.coords(dim_coords=False)) - ) + self.assertEqual(set(expected_aux_coords), set(cube.coords(dim_coords=False))) if x_coord: if xco_stdname is None: @@ -406,9 +400,7 @@ def test_latlon_bad_gridmapping_varname(self): # Notes: # * behaviours all the same as 'test_bad_gridmapping_nameproperty' warning = "Missing.*grid mapping variable 'grid'" - result = self.run_testcase( - warning_regex=warning, gridmapvar_name="grid_2" - ) + result = self.run_testcase(warning_regex=warning, gridmapvar_name="grid_2") self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): @@ -453,9 +445,7 @@ def test_mapping_rotated(self): # * dim-coords identified : lat+lon # * coords built: lat+lon coords ROTATED, with coord-system # - "rotated" means that they have a different name + units - result = self.run_testcase( - 
mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON - ) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON) self.check_result(result, cube_cstype=ics.RotatedGeogCS) # @@ -482,9 +472,7 @@ def test_mapping_albers(self): self.check_result(result, cube_cstype=ics.AlbersEqualArea) def test_mapping_geostationary(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_GEOSTATIONARY - ) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_GEOSTATIONARY) self.check_result(result, cube_cstype=ics.Geostationary) def test_mapping_lambert_azimuthal(self): @@ -500,9 +488,7 @@ def test_mapping_lambert_conformal(self): self.check_result(result, cube_cstype=ics.LambertConformal) def test_mapping_mercator(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR - ) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR) self.check_result(result, cube_cstype=ics.Mercator) def test_mapping_stereographic(self): @@ -514,15 +500,11 @@ def test_mapping_polar_stereographic(self): self.check_result(result, cube_cstype=ics.PolarStereographic) def test_mapping_transverse_mercator(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_TRANSVERSE - ) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_TRANSVERSE) self.check_result(result, cube_cstype=ics.TransverseMercator) def test_mapping_vertical_perspective(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_VERTICAL - ) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_VERTICAL) self.check_result(result, cube_cstype=ics.VerticalPerspective) def test_mapping_unsupported(self): @@ -539,9 +521,7 @@ def test_mapping_unsupported(self): # * NO grid-mapping is identified (or coord-system built) # * There is no warning for this : it fails silently. # TODO: perhaps there _should_ be a warning in such cases ? 
- result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL - ) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) def test_mapping_undefined(self): @@ -717,9 +697,7 @@ def test_mapping__mismatch__nonll_coords_latlon_system(self): yco_name="projection_y", yco_units="m", ) - self.check_result( - result, cube_no_cs=True, xco_stdname=False, yco_stdname=False - ) + self.check_result(result, cube_no_cs=True, xco_stdname=False, yco_stdname=False) def test_mapping__mismatch__nonll_coords_rotated_system(self): # Rules Triggered: @@ -738,9 +716,7 @@ def test_mapping__mismatch__nonll_coords_rotated_system(self): yco_name="projection_y", yco_units="m", ) - self.check_result( - result, cube_no_cs=True, xco_stdname=False, yco_stdname=False - ) + self.check_result(result, cube_no_cs=True, xco_stdname=False, yco_stdname=False) def test_mapping__mismatch__nonll_coords_missing_system(self): # Rules Triggered: @@ -760,9 +736,7 @@ def test_mapping__mismatch__nonll_coords_missing_system(self): yco_name="projection_y", yco_units="m", ) - self.check_result( - result, cube_no_cs=True, xco_stdname=False, yco_stdname=False - ) + self.check_result(result, cube_no_cs=True, xco_stdname=False, yco_stdname=False) class Test__aux_latlons(Mixin__grid_mapping, tests.IrisTest): @@ -811,9 +785,7 @@ def test_aux_lat_and_lon(self): # * a grid-mapping is recognised, but discarded, as in this case # there are no dim-coords to reference it. result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) - self.check_result( - result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True - ) + self.check_result(result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True) def test_aux_lon_rotated(self): # Rotated-style lat + lon coords, X is an aux-coord. 
@@ -873,9 +845,7 @@ def test_nondim_lats(self): # * in terms of rule triggering, this is not distinct from the # "normal" case : but latitude is now created as an aux-coord. warning = "must be.* monotonic" - result = self.run_testcase( - warning_regex=warning, yco_values=[0.0, 0.0] - ) + result = self.run_testcase(warning_regex=warning, yco_values=[0.0, 0.0]) self.check_result(result, yco_is_aux=True) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index 582ab7e200..ad310e9450 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -12,9 +12,7 @@ import iris.tests as tests # isort: skip import iris.fileformats._nc_load_rules.helpers as hh -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) +from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions class Test__formulae_tests(Mixin__nc_load_actions, tests.IrisTest): @@ -169,9 +167,7 @@ def test_missing_term(self): # 007 : fc_formula_type_atmosphere_hybrid_height_coordinate # 008 : fc_formula_term(a) # 009 : fc_formula_term(b) - result = self.run_testcase( - term_names=["a", "b"] # missing the 'orog' term - ) + result = self.run_testcase(term_names=["a", "b"]) # missing the 'orog' term self.check_result(result, formula_terms=["a", "b"]) def test_no_terms(self): @@ -224,9 +220,7 @@ def test_two_formulae(self): ) extra_type = "ocean_sigma_coordinate" - result = self.run_testcase( - extra_formula_type=extra_type, warning_regex=warning - ) + result = self.run_testcase(extra_formula_type=extra_type, warning_regex=warning) # NOTE: FOR NOW, check expected behaviour : only one factory will be # built, but there are coordinates (terms) for both types. 
# TODO: this is a bug and needs fixing : translation should handle @@ -240,72 +234,44 @@ def test_two_formulae(self): def test_atmosphere_sigma_coordinate(self): hybrid_type = "atmosphere_sigma_coordinate" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) def test_atmosphere_hybrid_sigma_pressure_coordinate(self): hybrid_type = "atmosphere_hybrid_sigma_pressure_coordinate" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) def test_ocean_sigma_z_coordinate(self): hybrid_type = "ocean_sigma_z_coordinate" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) def test_ocean_sigma_coordinate(self): hybrid_type = "ocean_sigma_coordinate" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, 
formula_terms=term_names) def test_ocean_s_coordinate(self): hybrid_type = "ocean_s_coordinate" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) def test_ocean_s_coordinate_g1(self): hybrid_type = "ocean_s_coordinate_g1" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) def test_ocean_s_coordinate_g2(self): hybrid_type = "ocean_s_coordinate_g2" term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) + result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) + self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index a8c7d2cc5f..80908b2abd 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -13,9 +13,7 @@ import iris.tests as tests # isort: skip from iris.coord_systems import GeogCS, RotatedGeogCS -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - 
Mixin__nc_load_actions, -) +from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions class Mixin_latlon_dimcoords(Mixin__nc_load_actions): @@ -172,9 +170,7 @@ def test_fullinfo_unrotated(self): units=self.unrotated_units, grid_mapping="latlon", ) - self.check_result( - result, self.unrotated_name, None, "degrees", "latlon" - ) + self.check_result(result, self.unrotated_name, None, "degrees", "latlon") def test_fullinfo_rotated(self): # Check behaviour with all normal info elements for 'rotated' case. @@ -184,9 +180,7 @@ def test_fullinfo_rotated(self): units=self.rotated_units, grid_mapping="rotated", ) - self.check_result( - result, self.rotated_name, None, "degrees", "rotated" - ) + self.check_result(result, self.rotated_name, None, "degrees", "rotated") def test_axis(self): # A suitable axis --> unrotated lat/lon coord, but unknown units. @@ -208,12 +202,8 @@ def test_units_rotated(self): def test_units_unrotated_gridmapping(self): # With an unrotated unit *AND* a suitable grid-mapping, we identify a # rotated latlon coordinate + assign it the coord-system. - result = self.run_testcase( - units=self.unrotated_units, grid_mapping="latlon" - ) - self.check_result( - result, self.unrotated_name, None, "degrees", "latlon" - ) + result = self.run_testcase(units=self.unrotated_units, grid_mapping="latlon") + self.check_result(result, self.unrotated_name, None, "degrees", "latlon") def test_units_rotated_gridmapping_noname(self): # Rotated units and grid-mapping, but *without* the expected name. 
@@ -232,9 +222,7 @@ def test_units_rotated_gridmapping_withname(self): units="degrees", grid_mapping="rotated", ) - self.check_result( - result, self.rotated_name, None, "degrees", "rotated" - ) + self.check_result(result, self.rotated_name, None, "degrees", "rotated") def test_units_rotated_gridmapping_varname(self): # Same but with var-name containing the standard-name : in this case we @@ -263,9 +251,7 @@ def test_varname_unrotated_units_rotated(self): # (= set standard-name). # N.B. this accepts "degrees" as a generic term, and so does *not* # interpret it as a rotated coordinate. - result = self.run_testcase( - var_name=self.unrotated_name, units="degrees" - ) + result = self.run_testcase(var_name=self.unrotated_name, units="degrees") self.check_result(result, self.unrotated_name, None, "degrees") def test_longname(self): @@ -289,9 +275,7 @@ def test_stdname_unrotated_gridmapping(self): result = self.run_testcase( standard_name=self.unrotated_name, grid_mapping="latlon" ) - self.check_result( - result, self.unrotated_name, None, "unknown", "latlon" - ) + self.check_result(result, self.unrotated_name, None, "unknown", "latlon") def test_stdname_rotated_gridmapping(self): # An *rotated* standard-name and grid-mapping, translates into a diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index a1a93056cb..3d0a0017c8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -17,9 +17,7 @@ from iris.coords import AncillaryVariable, AuxCoord, CellMeasure from iris.fileformats.pp import STASH -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) +from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions class Test__ukmo_attributes(Mixin__nc_load_actions, tests.IrisTest): 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index c27d2445e9..efd67e949a 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -12,9 +12,7 @@ import iris.tests as tests # isort: skip from iris.coords import AuxCoord, DimCoord -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) +from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions class Opts(dict): @@ -65,10 +63,7 @@ def _make_testcase_cdl( # NB we don't necessarily *use* either of these dims_and_lens = {timedim_name: 2, perioddim_name: 3} dims_string = "\n".join( - [ - f" {name} = {length} ;" - for name, length in dims_and_lens.items() - ] + [f" {name} = {length} ;" for name, length in dims_and_lens.items()] ) phenom_auto_dims = [] @@ -143,8 +138,7 @@ def _make_testcase_cdl( else: phenom_coords_string = " ".join(phenom_coords) phenom_coords_string = ( - " " - f'phenom:coordinates = "{phenom_coords_string}" ; ' + " " f'phenom:coordinates = "{phenom_coords_string}" ; ' ) # Create a testcase with time dims + coords. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index 994d2958c2..a8926a0c59 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -55,9 +55,7 @@ def test_add_case_specific_fact__existingname(self): name = "this" self.assertEqual(engine.fact_list(name), [("that", "other")]) engine.add_case_specific_fact(name, ("yetanother",)) - self.assertEqual( - engine.fact_list(name), [("that", "other"), ("yetanother",)] - ) + self.assertEqual(engine.fact_list(name), [("that", "other"), ("yetanother",)]) def test_add_case_specific_fact__emptyargs(self): # Check that empty args work ok, and will create a new fact. @@ -70,10 +68,7 @@ def test_add_fact(self): # Check that 'add_fact' is equivalent to (short for) a call to # 'add_case_specific_fact'. engine = self.empty_engine - target = ( - "iris.fileformats._nc_load_rules.engine.Engine" - ".add_case_specific_fact" - ) + target = "iris.fileformats._nc_load_rules.engine.Engine.add_case_specific_fact" acsf_call = self.patch(target) engine.add_fact("extra", ()) self.assertEqual(acsf_call.call_count, 1) @@ -90,9 +85,7 @@ def test_get_kb(self): self.assertIs(kb, engine.facts) def test_fact_list__existing(self): - self.assertEqual( - self.nonempty_engine.fact_list("this"), [("that", "other")] - ) + self.assertEqual(self.nonempty_engine.fact_list("this"), [("that", "other")]) def test_fact_list__nonexisting(self): self.assertEqual(self.empty_engine.fact_list("odd-unknown"), []) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index b6e9ba954c..1481f2c886 100644 --- 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -56,9 +56,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ) else: gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) + expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) cf_grid_var = mock.Mock(spec=[], **gridvar_props) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index fd500b4831..7cb7cbf897 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -128,9 +128,7 @@ def _check_case(self, dimension_names): build_auxiliary_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with( - self.expected_coord, [0, 1] - ) + self.engine.cube.add_aux_coord.assert_called_with(self.expected_coord, [0, 1]) # Test that engine.cube_parts container is correctly populated. expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] @@ -192,9 +190,7 @@ def patched__getitem__(proxy_self, keys): new=patched__getitem__, ): # While loading, "turn off" loading small variables as real data. 
- with mock.patch( - "iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0 - ): + with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): yield def test_scale_factor_add_offset_int(self): @@ -243,9 +239,7 @@ def setUp(self): scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock( - chunking=mock.Mock(return_value=None), spec=[] - ), + cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), standard_name=None, long_name="wibble", units="days since 1970-01-01", diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index 4f19d44a2a..8676ce4a4c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -114,9 +114,7 @@ def check_case_dim_coord_construction(self, climatology=False): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) def test_dim_coord_construction(self): self.check_case_dim_coord_construction(climatology=False) @@ -148,9 +146,7 @@ def test_dim_coord_construction_masked_array(self): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) # Assert warning is raised assert len(w) == 1 @@ -179,9 +175,7 @@ def test_dim_coord_construction_masked_array_mask_does_nothing(self): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. 
- self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) # Assert no warning is raised assert len(w) == 0 @@ -205,9 +199,7 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) # Assert no warning is raised assert len(w) == 0 @@ -228,15 +220,11 @@ def test_aux_coord_construction(self): warning_patch = mock.patch("warnings.warn") # Asserts must lie within context manager because of deferred loading. - with ( - warning_patch - ), self.deferred_load_patch, self.get_cf_bounds_var_patch: + with warning_patch, self.deferred_load_patch, self.get_cf_bounds_var_patch: build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) self.assertIn( "creating 'wibble' auxiliary coordinate instead", warnings.warn.call_args[0][0], @@ -321,15 +309,11 @@ def test_slowest_varying_vertex_dim(self): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) # Test that engine.cube_parts container is correctly populated. 
expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual( - self.engine.cube_parts["coordinates"], expected_list - ) + self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) def test_fastest_varying_vertex_dim(self): bounds = np.arange(12).reshape(6, 2) @@ -353,15 +337,11 @@ def test_fastest_varying_vertex_dim(self): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual( - self.engine.cube_parts["coordinates"], expected_list - ) + self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) def test_fastest_with_different_dim_names(self): # Despite the dimension names 'x' differing from the coord's @@ -388,15 +368,11 @@ def test_fastest_with_different_dim_names(self): build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, [0]) # Test that engine.cube_parts container is correctly populated. 
expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual( - self.engine.cube_parts["coordinates"], expected_list - ) + self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) class TestCircular(tests.IrisTest, RulesTestMixin): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index a3efcb0dc4..7fe95840b3 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -22,9 +22,7 @@ class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): - def _test( - self, inverse_flattening=False, replace_props=None, remove_props=None - ): + def _test(self, inverse_flattening=False, replace_props=None, remove_props=None): """ Generic test that can check vertical perspective validity with or without inverse flattening. 
@@ -74,9 +72,7 @@ def test_false_offsets_missing(self): self._test(remove_props=["false_easting", "false_northing"]) def test_false_offsets_none(self): - self._test( - replace_props={"false_easting": None, "false_northing": None} - ) + self._test(replace_props={"false_easting": None, "false_northing": None}) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index 8d5b46c6bb..93d84055ab 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -53,15 +53,11 @@ def _test(self, inverse_flattening=False, no_optionals=False): ) else: gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) + expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) cf_grid_var = mock.Mock(spec=[], **gridvar_props) - cs = build_lambert_azimuthal_equal_area_coordinate_system( - None, cf_grid_var - ) + cs = build_lambert_azimuthal_equal_area_coordinate_system(None, cf_grid_var) expected = LambertAzimuthalEqualArea( latitude_of_projection_origin=test_lat, diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index 9232e146cb..d2d0659077 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -56,9 +56,7 @@ def _test(self, inverse_flattening=False, 
no_optionals=False): ) else: gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) + expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) cf_grid_var = mock.Mock(spec=[], **gridvar_props) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index 4958eccbfd..cca3610925 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -16,9 +16,7 @@ import iris from iris.coord_systems import Mercator -from iris.fileformats._nc_load_rules.helpers import ( - build_mercator_coordinate_system, -) +from iris.fileformats._nc_load_rules.helpers import build_mercator_coordinate_system class TestBuildMercatorCoordinateSystem(tests.IrisTest): @@ -34,9 +32,7 @@ def test_valid(self): cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( - longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin - ), + longitude_of_projection_origin=(cf_grid_var.longitude_of_projection_origin), ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), @@ -56,9 +52,7 @@ def test_inverse_flattening(self): cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( - longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin - ), + longitude_of_projection_origin=(cf_grid_var.longitude_of_projection_origin), ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, inverse_flattening=cf_grid_var.inverse_flattening, @@ -97,9 +91,7 @@ def test_standard_parallel_missing(self): cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( - longitude_of_projection_origin=( - 
cf_grid_var.longitude_of_projection_origin - ), + longitude_of_projection_origin=(cf_grid_var.longitude_of_projection_origin), ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), @@ -118,9 +110,7 @@ def test_scale_factor_at_projection_origin(self): cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( - longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin - ), + longitude_of_projection_origin=(cf_grid_var.longitude_of_projection_origin), ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py index c377cf7d1b..26b6d30573 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -13,12 +13,7 @@ from iris import coord_systems from iris._deprecation import IrisDeprecation -from iris.coord_systems import ( - CoordSystem, - GeogCS, - ObliqueMercator, - RotatedMercator, -) +from iris.coord_systems import CoordSystem, GeogCS, ObliqueMercator, RotatedMercator from iris.fileformats._nc_load_rules.helpers import ( build_oblique_mercator_coordinate_system, ) @@ -139,16 +134,12 @@ class TestAttributes: ellipsoid=None, ) - @pytest.fixture( - autouse=True, params=kwarg_permutations, ids=permutation_ids - ) + @pytest.fixture(autouse=True, params=kwarg_permutations, ids=permutation_ids) def make_variant_inputs(self, request) -> None: """Parse a ParamTuple into usable test information.""" inputs: ParamTuple = request.param - self.nc_attributes = dict( - self.nc_attributes_default, **inputs.nc_attributes - ) + self.nc_attributes = dict(self.nc_attributes_default, 
**inputs.nc_attributes) self.expected_class = inputs.expected_class coord_system_kwargs_expected = dict( self.coord_system_kwargs_default, **inputs.coord_system_kwargs @@ -157,9 +148,7 @@ def make_variant_inputs(self, request) -> None: if self.expected_class is RotatedMercator: del coord_system_kwargs_expected["azimuth_of_central_line"] - self.coord_system_args_expected = list( - coord_system_kwargs_expected.values() - ) + self.coord_system_args_expected = list(coord_system_kwargs_expected.values()) def test_attributes(self): cf_var_mock = mock.Mock(spec=[], **self.nc_attributes) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index a483390e36..46c81242ad 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -42,9 +42,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): ) else: gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) + expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) if no_offsets: del gridvar_props["false_easting"] diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index ae881259fe..77413adb19 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -42,9 +42,7 @@ def _test(self, inverse_flattening=False, no_options=False): ) else: gridvar_props["semi_minor_axis"] = 
6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) + expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) if no_options: del gridvar_props["false_easting"] @@ -59,12 +57,8 @@ def _test(self, inverse_flattening=False, no_options=False): cs = build_transverse_mercator_coordinate_system(None, cf_grid_var) expected = TransverseMercator( - latitude_of_projection_origin=( - cf_grid_var.latitude_of_projection_origin - ), - longitude_of_central_meridian=( - cf_grid_var.longitude_of_central_meridian - ), + latitude_of_projection_origin=(cf_grid_var.latitude_of_projection_origin), + longitude_of_central_meridian=(cf_grid_var.longitude_of_central_meridian), false_easting=test_easting, false_northing=test_northing, scale_factor_at_central_meridian=test_scale_factor, diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index 9dc31ba490..20b9a7347f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -11,9 +11,7 @@ from unittest import mock import warnings -from iris.fileformats._nc_load_rules.helpers import ( - has_supported_mercator_parameters, -) +from iris.fileformats._nc_load_rules.helpers import has_supported_mercator_parameters # import iris tests first so that some things can be initialised before # importing anything else @@ -119,8 +117,7 @@ def test_invalid_scale_factor_and_standard_parallel(self): self.assertEqual(len(warns), 1) self.assertRegex( str(warns[0]), - "both " - '"scale_factor_at_projection_origin" and "standard_parallel"', + "both " '"scale_factor_at_projection_origin" and "standard_parallel"', ) diff --git 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index faffefd8f2..3bcf14e0b3 100755 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -41,9 +41,7 @@ def test_valid_base_north(self): ) engine = _engine(cf_grid_var, cf_name) - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertTrue(is_valid) @@ -61,9 +59,7 @@ def test_valid_base_south(self): ) engine = _engine(cf_grid_var, cf_name) - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertTrue(is_valid) @@ -81,9 +77,7 @@ def test_valid_straight_vertical_longitude(self): ) engine = _engine(cf_grid_var, cf_name) - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertTrue(is_valid) @@ -101,9 +95,7 @@ def test_valid_false_easting_northing(self): ) engine = _engine(cf_grid_var, cf_name) - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertTrue(is_valid) @@ -121,9 +113,7 @@ def test_valid_standard_parallel(self): ) engine = _engine(cf_grid_var, cf_name) - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertTrue(is_valid) @@ -141,9 +131,7 @@ def test_valid_scale_factor(self): ) engine = _engine(cf_grid_var, cf_name) - is_valid = 
has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertTrue(is_valid) @@ -166,16 +154,13 @@ def test_invalid_scale_factor_and_standard_parallel(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertFalse(is_valid) self.assertEqual(len(warns), 1) self.assertRegex( str(warns[0]), - "both " - '"scale_factor_at_projection_origin" and "standard_parallel"', + "both " '"scale_factor_at_projection_origin" and "standard_parallel"', ) def test_absent_scale_factor_and_standard_parallel(self): @@ -195,9 +180,7 @@ def test_absent_scale_factor_and_standard_parallel(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertFalse(is_valid) self.assertEqual(len(warns), 1) @@ -225,9 +208,7 @@ def test_invalid_latitude_of_projection_origin(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") - is_valid = has_supported_polar_stereographic_parameters( - engine, cf_name - ) + is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) self.assertFalse(is_valid) self.assertEqual(len(warns), 1) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index a58413d399..f3908461c7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -34,9 +34,7 @@ def test_with_interval(self): "time: variance 
(interval: 1 hr)", "time : variance (interval: 1 hr)", ] - expected = ( - CellMethod(method="variance", coords="time", intervals="1 hr"), - ) + expected = (CellMethod(method="variance", coords="time", intervals="1 hr"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) self.assertEqual(res, expected) @@ -48,9 +46,7 @@ def test_multiple_axes(self): "lat : lon: standard_deviation", "lat : lon : standard_deviation", ] - expected = ( - CellMethod(method="standard_deviation", coords=["lat", "lon"]), - ) + expected = (CellMethod(method="standard_deviation", coords=["lat", "lon"]),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) self.assertEqual(res, expected) @@ -149,9 +145,7 @@ def test_portions_of_cells(self): "area: mean where sea_ice over sea", "area : mean where sea_ice over sea", ] - expected = ( - CellMethod(method="mean where sea_ice over sea", coords="area"), - ) + expected = (CellMethod(method="mean where sea_ice over sea", coords="area"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) self.assertEqual(res, expected) diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py index 7249c39829..221ee30376 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py @@ -133,9 +133,9 @@ def test_invalid_var_name(tmp_filepath, save_cubelist_with_sigma): def test_control_multiple(tmp_filepath, save_cubelist_with_sigma): cube_varname, sigma_varname = save_cubelist_with_sigma - with CHUNK_CONTROL.set( - cube_varname, model_level_number=2 - ), CHUNK_CONTROL.set(sigma_varname, model_level_number=3): + with CHUNK_CONTROL.set(cube_varname, model_level_number=2), CHUNK_CONTROL.set( + sigma_varname, model_level_number=3 + ): cubes = CubeList(loader.load_cubes(tmp_filepath)) 
cube = cubes.extract_cube(cube_varname) assert cube.shape == (3, 4, 5, 6) diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index caece8b6bc..9aa696bccd 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -23,9 +23,7 @@ def setUp(self): self.shape = (300000, 240, 200) self.expected_chunks = _optimum_chunksize(self.shape, self.shape) - def _make( - self, chunksizes=None, shape=None, dtype="i4", **extra_properties - ): + def _make(self, chunksizes=None, shape=None, dtype="i4", **extra_properties): cf_data = mock.MagicMock( _FillValue=None, __getitem__="", diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index 01c6838241..eacdee2782 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -27,9 +27,7 @@ def setUp(self): self.ps = mock.MagicMock(units="units") coordinates = [(mock.sentinel.b, "b"), (self.ps, "ps")] self.cube_parts = dict(coordinates=coordinates) - self.engine = mock.Mock( - requires=self.requires, cube_parts=self.cube_parts - ) + self.engine = mock.Mock(requires=self.requires, cube_parts=self.cube_parts) self.cube = mock.create_autospec(Cube, spec_set=True, instance=True) # Patch out the check_dependencies functionality. 
func = "iris.aux_factory.HybridPressureFactory._check_dependencies" @@ -61,9 +59,7 @@ def test_formula_terms_a_p0(self): long_name="vertical pressure", var_name="ap", ) - self.cube_parts["coordinates"].extend( - [(coord_a, "a"), (coord_p0, "p0")] - ) + self.cube_parts["coordinates"].extend([(coord_a, "a"), (coord_p0, "p0")]) self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. @@ -97,9 +93,7 @@ def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( long_name="vertical pressure", var_name="ap", ) - self.cube_parts["coordinates"].extend( - [(coord_a, "a"), (coord_p0, "p0")] - ) + self.cube_parts["coordinates"].extend([(coord_a, "a"), (coord_p0, "p0")]) self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. @@ -133,9 +127,7 @@ def test_formula_terms_p0_non_scalar(self): def test_formula_terms_p0_bounded(self): coord_a = DimCoord(np.arange(5)) coord_p0 = DimCoord(1, bounds=[0, 2], var_name="p0") - self.cube_parts["coordinates"].extend( - [(coord_a, "a"), (coord_p0, "p0")] - ) + self.cube_parts["coordinates"].extend([(coord_a, "a"), (coord_p0, "p0")]) self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") with warnings.catch_warnings(record=True) as warn: warnings.simplefilter("always") @@ -165,7 +157,7 @@ def test_formula_terms_ap_missing_coords(self): with mock.patch("warnings.warn") as warn: _load_aux_factory(self.engine, self.cube) warn.assert_called_once_with( - "Unable to find coordinate for variable " "'ap'", + "Unable to find coordinate for variable 'ap'", category=IrisFactoryCoordNotFoundWarning, ) self._check_no_delta() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py index 2522392c21..0bd4966944 100644 
--- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py @@ -12,9 +12,7 @@ import iris from iris.fileformats.cf import CFDataVariable -from iris.fileformats.netcdf.loader import ( - _translate_constraints_to_var_callback, -) +from iris.fileformats.netcdf.loader import _translate_constraints_to_var_callback # import iris tests first so that some things can be initialised before # importing anything else @@ -90,9 +88,7 @@ def test_NameConstraint_standard_name_long_name_var_name(self): self.assertArrayEqual(result, [False, False, False, True, False]) def test_NameConstraint_with_STASH(self): - constr = iris.NameConstraint( - standard_name="x_wind", STASH="m01s00i024" - ) + constr = iris.NameConstraint(standard_name="x_wind", STASH="m01s00i024") result = _translate_constraints_to_var_callback(constr) self.assertIsNone(result) diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 77c1da2d1c..7abb73ae52 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -209,12 +209,8 @@ def test_default_units(self): self.assertEqual(cubes[0].units, as_unit("unknown")) self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) self.assertEqual(cubes[0].coord("x").units, as_unit(1)) - self.assertEqual( - cubes[0].ancillary_variable("refs").units, as_unit("unknown") - ) - self.assertEqual( - cubes[0].cell_measure("areas").units, as_unit("unknown") - ) + self.assertEqual(cubes[0].ancillary_variable("refs").units, as_unit("unknown")) + self.assertEqual(cubes[0].cell_measure("areas").units, as_unit("unknown")) class TestsMesh(tests.IrisTest): @@ -280,9 +276,7 @@ def test_standard_dims(self): self.assertIsNotNone(cube.coords("levels")) def 
test_mesh_coord(self): - cube = [ - cube for cube in self.mesh_cubes if cube.var_name == "face_data" - ][0] + cube = [cube for cube in self.mesh_cubes if cube.var_name == "face_data"][0] face_x = cube.coord("longitude") face_y = cube.coord("latitude") diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index e4d8488a76..38c4fff32d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -244,9 +244,7 @@ def test_least_significant_digit(self): with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, least_significant_digit=1) cube_saved = iris.load_cube(nc_path) - self.assertEqual( - cube_saved.attributes["least_significant_digit"], 1 - ) + self.assertEqual(cube_saved.attributes["least_significant_digit"], 1) self.assertFalse(np.all(cube.data == cube_saved.data)) self.assertArrayAllClose(cube.data, cube_saved.data, 0.1) @@ -456,9 +454,7 @@ def test_valid_range_saved(self): with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual( - ds.variables["longitude"].valid_range, vrange - ) + self.assertArrayEqual(ds.variables["longitude"].valid_range, vrange) ds.close() def test_valid_min_saved(self): @@ -641,9 +637,7 @@ def test_leading_invalid(self): def test_no_hyphen(self): # CF explicitly prohibits hyphen, even though it is fine in NetCDF. 
- self.assertEqual( - Saver.cf_valid_var_name("valid-netcdf"), "valid_netcdf" - ) + self.assertEqual(Saver.cf_valid_var_name("valid-netcdf"), "valid_netcdf") class _Common__check_attribute_compliance: @@ -789,9 +783,7 @@ def test_valid_range_and_valid_min_valid_max_provided(self): msg = 'Both "valid_range" and "valid_min"' with Saver("nonexistent test file", "NETCDF4") as saver: with self.assertRaisesRegex(ValueError, msg): - saver.check_attribute_compliance( - self.container, self.data_dtype - ) + saver.check_attribute_compliance(self.container, self.data_dtype) class Test__cf_coord_identity(tests.IrisTest): @@ -878,9 +870,7 @@ def setncattr(self, name, attr): Saver._create_cf_grid_mapping(saver, cube, variable) self.assertEqual(create_var_fn.call_count, 1) - self.assertEqual( - variable.grid_mapping, grid_variable.grid_mapping_name - ) + self.assertEqual(variable.grid_mapping, grid_variable.grid_mapping_name) return grid_variable def _variable_attributes(self, coord_system): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index 2e7091c43b..c22b24eedc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -34,9 +34,7 @@ class Test__create_cf_bounds(test_Saver.Test__create_cf_bounds): def climatology_3d(): cube = stock.climatology_3d() aux_coord = AuxCoord.from_coord(cube.coord("time")) - lazy_coord = aux_coord.copy( - aux_coord.lazy_points(), aux_coord.lazy_bounds() - ) + lazy_coord = aux_coord.copy(aux_coord.lazy_points(), aux_coord.lazy_bounds()) cube.replace_coord(lazy_coord) return cube @@ -102,9 +100,7 @@ def test_lazy_streamed_data(self): def test_lazy_streamed_coord(self): aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) - lazy_coord = aux_coord.copy( - aux_coord.lazy_points(), aux_coord.lazy_bounds() - ) + lazy_coord = aux_coord.copy(aux_coord.lazy_points(), 
aux_coord.lazy_bounds()) self.cube.replace_coord(lazy_coord) self.save_common(self.cube) self.assertTrue(self.store_watch.called) diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 10c5dbecf4..d16205072a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -31,9 +31,7 @@ def saver_patch(): mock_dataset = mock.MagicMock() mock_dataset_class = mock.Mock(return_value=mock_dataset) # Mock the wrapper within the netcdf saver - target1 = ( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" - ) + target1 = "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is # used by NetCDFDataProxy and NetCDFWriteProxy. target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" @@ -58,18 +56,14 @@ def data_form(request) -> bool: @staticmethod def saver(compute) -> Saver: # Create a test Saver object - return Saver( - filename="", netcdf_format="NETCDF4", compute=compute - ) + return Saver(filename="", netcdf_format="NETCDF4", compute=compute) @staticmethod def mock_var(shape, with_data_array): # Create a test cf_var object. # N.B. using 'spec=' so we can control whether it has a '_data_array' property. 
if with_data_array: - extra_properties = { - "_data_array": mock.sentinel.initial_data_array - } + extra_properties = {"_data_array": mock.sentinel.initial_data_array} else: extra_properties = {} mock_cfvar = mock.MagicMock( diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 8177e0c299..fb51a123e4 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -89,8 +89,7 @@ def apply_xyargs(coords, xyargs): np.zeros((n_edges, 2), np.int32), cf_role="edge_node_connectivity" ) edge_coords = [ - AuxCoord(np.arange(n_edges), standard_name=name) - for name in XY_NAMES + AuxCoord(np.arange(n_edges), standard_name=name) for name in XY_NAMES ] apply_xyargs(edge_coords, edgecoord_xyargs) @@ -100,8 +99,7 @@ def apply_xyargs(coords, xyargs): np.zeros((n_faces, 4), np.int32), cf_role="face_node_connectivity" ) face_coords = [ - AuxCoord(np.arange(n_faces), standard_name=name) - for name in XY_NAMES + AuxCoord(np.arange(n_faces), standard_name=name) for name in XY_NAMES ] apply_xyargs(face_coords, facecoord_xyargs) @@ -114,9 +112,7 @@ def apply_xyargs(coords, xyargs): else: loc_from, loc_to, _ = role.split("_") dims = [mesh_dims[loc] for loc in (loc_from, loc_to)] - conn = Connectivity( - np.zeros(dims, dtype=np.int32), cf_role=role - ) + conn = Connectivity(np.zeros(dims, dtype=np.int32), cf_role=role) connectivities[role] = conn applyargs(conn, kwargs) @@ -150,12 +146,8 @@ def make_mesh(basic=True, **kwargs): use_kwargs = dict( n_nodes=5, n_faces=2, - nodecoord_xyargs=tuple( - dict(var_name=f"node_{loc}") for loc in XY_LOCS - ), - facecoord_xyargs=tuple( - dict(var_name=f"face_{loc}") for loc in XY_LOCS - ), + nodecoord_xyargs=tuple(dict(var_name=f"node_{loc}") for loc in XY_LOCS), + facecoord_xyargs=tuple(dict(var_name=f"face_{loc}") for loc in XY_LOCS), mesh_kwargs=dict( 
var_name="Mesh2d", node_dimension="Mesh2d_nodes", @@ -290,9 +282,7 @@ def check_attrs_match(attrs): return result varsdict = { - name: attrs - for name, attrs in varsdict.items() - if check_attrs_match(attrs) + name: attrs for name, attrs in varsdict.items() if check_attrs_match(attrs) } return varsdict @@ -398,9 +388,7 @@ def test_basic_mesh(self): # The face coords should both map that single dim. face_dim = vars_meshdim(vars, "face") - self.assertTrue( - all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords) - ) + self.assertTrue(all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords)) # The face coordinates should be referenced by the data variable. for coord in face_coords: @@ -420,9 +408,7 @@ def test_basic_mesh(self): # There are no edges. self.assertNotIn("edge_node_connectivity", mesh_props) - self.assertEqual( - len(vars_w_props(vars, cf_role="edge_node_connectivity")), 0 - ) + self.assertEqual(len(vars_w_props(vars, cf_role="edge_node_connectivity")), 0) # The dims are precisely (nodes, faces, nodes-per-face), in that order. 
self.assertEqual( @@ -511,12 +497,8 @@ def test_multi_cubes_equal_meshes(self): self.assertEqual(sorted(mesh_names), ["Mesh2d"]) # same dimensions - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces" - ) + self.assertEqual(vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes") + self.assertEqual(vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces") # there are exactly two data-variables with a 'mesh' property mesh_datavars = vars_w_props(vars, mesh="*") @@ -642,9 +624,7 @@ def test_nonmesh_hybrid_dim(self): # have just 1 data-variable, which maps to hybrid-height and mesh dims ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() - self.assertEqual( - data_props[_VAR_DIMS], ["model_level_number", face_dim] - ) + self.assertEqual(data_props[_VAR_DIMS], ["model_level_number", face_dim]) self.assertEqual(data_props["mesh"], mesh_name) self.assertEqual(data_props["location"], "face") @@ -849,9 +829,7 @@ def test_nonuniform_connectivity(self): # check that the connectivity has the corrects dims and fill-property ff_props = vars[ff_conn_name] - self.assertEqual( - ff_props[_VAR_DIMS], ["Mesh2d_faces", "Mesh2d_face_N_faces"] - ) + self.assertEqual(ff_props[_VAR_DIMS], ["Mesh2d_faces", "Mesh2d_face_N_faces"]) self.assertIn("_FillValue", ff_props) self.assertEqual(ff_props["_FillValue"], -1) @@ -874,9 +852,7 @@ def test_nonuniform_connectivity(self): def test_one_dimensional(self): # Test a mesh with edges only. 
- mesh = make_mesh( - n_edges=5, n_faces=0, mesh_kwargs={"var_name": "Mesh1d"} - ) + mesh = make_mesh(n_edges=5, n_faces=0, mesh_kwargs={"var_name": "Mesh1d"}) # Save and snapshot the result tempfile_path = self.check_save_mesh(mesh) @@ -1211,12 +1187,8 @@ def _check_two_different_meshes(self, vars): # they use different dimensions # mesh1 - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces" - ) + self.assertEqual(vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes") + self.assertEqual(vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces") if "edge_coordinates" in vars["Mesh2d"]: self.assertEqual( vars_meshdim(vars, "edge", mesh_name="Mesh2d"), "Mesh2d_edge" diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index 9068837b2c..c077c81f20 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -41,9 +41,7 @@ def _call_target(self, fill_value, keys, vals): results = results.compute() # Return a named tuple, for named-property access to the 2 result values. 
- result = collections.namedtuple("_", ["is_masked", "contains_value"])( - *results - ) + result = collections.namedtuple("_", ["is_masked", "contains_value"])(*results) return result def test_no_fill_value_not_masked(self): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py index 42119094a7..d1ffb56a28 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -12,16 +12,11 @@ from iris.exceptions import IrisSaverFillValueWarning from iris.fileformats.netcdf._thread_safe_nc import default_fillvals -from iris.fileformats.netcdf.saver import ( - _fillvalue_report, - _FillvalueCheckInfo, -) +from iris.fileformats.netcdf.saver import _fillvalue_report, _FillvalueCheckInfo class Test__fillvaluereport: - @pytest.mark.parametrize( - "is_bytes", [True, False], ids=["ByteData", "NonbyteData"] - ) + @pytest.mark.parametrize("is_bytes", [True, False], ids=["ByteData", "NonbyteData"]) @pytest.mark.parametrize( "is_masked", [True, False], ids=["MaskedData", "NonmaskedData"] ) @@ -31,9 +26,7 @@ class Test__fillvaluereport: @pytest.mark.parametrize( "given_user_fv", [True, False], ids=["WithUserfill", "NoUserfill"] ) - def test_fillvalue_checking( - self, is_bytes, is_masked, contains_fv, given_user_fv - ): + def test_fillvalue_checking(self, is_bytes, is_masked, contains_fv, given_user_fv): dtype_code = "u1" if is_bytes else "f4" dtype = np.dtype(dtype_code) if given_user_fv: @@ -54,7 +47,9 @@ def test_fillvalue_checking( if is_bytes and is_masked and not given_user_fv: msg_fragment = "'' contains byte data with masked points" elif contains_fv: - msg_fragment = "'' contains unmasked data points equal to the fill-value" + msg_fragment = ( + "'' contains unmasked data points equal to the fill-value" + ) else: msg_fragment = None @@ -89,7 +84,9 @@ def test_warn(self, 
has_collision): # Check results if has_collision: # Check that we get the expected warning - expected_msg = "'' contains unmasked data points equal to the fill-value" + expected_msg = ( + "'' contains unmasked data points equal to the fill-value" + ) # Enter a warnings context that checks for the error. warning_context = pytest.warns( IrisSaverFillValueWarning, match=expected_msg @@ -97,9 +94,7 @@ def test_warn(self, has_collision): warning_context.__enter__() else: # Check that we get NO warning of the expected type. - warnings.filterwarnings( - "error", category=IrisSaverFillValueWarning - ) + warnings.filterwarnings("error", category=IrisSaverFillValueWarning) # Do call: it should raise AND return a warning, ONLY IF there was a collision. result = _fillvalue_report( diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index 620bc64461..d4c3826549 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -19,12 +19,7 @@ from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD -from iris.fileformats.netcdf import ( - CF_CONVENTIONS_VERSION, - Saver, - _thread_safe_nc, - save, -) +from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, Saver, _thread_safe_nc, save from iris.tests.stock import lat_lon_cube from iris.tests.stock.mesh import sample_mesh_cube @@ -336,9 +331,7 @@ def test_string_dim_varname_collision(self): # NOTE: it *should* be possible for a cube with string data to cause # this collision, but cubes with string data are currently not working. 
# See : https://github.com/SciTools/iris/issues/4412 - x_dim = AuxCoord( - ["this", "that"], long_name="dim_x", var_name="string_auxco" - ) + x_dim = AuxCoord(["this", "that"], long_name="dim_x", var_name="string_auxco") cube_2.add_aux_coord(x_dim, 0) cube_1 = Cube([0], long_name="cube_1", var_name="string4") # Test save + loadback @@ -385,9 +378,7 @@ def mock_saver_class_create(*args, **kwargs): return mock_new_saver_call(*args, **kwargs) # Patch the Saver() creation to return our mock Saver object. - with mock.patch( - "iris.fileformats.netcdf.saver.Saver", mock_saver_class_create - ): + with mock.patch("iris.fileformats.netcdf.saver.Saver", mock_saver_class_create): # Return mocks for both constructor call, and Saver object. yield mock_new_saver_call, mock_saver @@ -399,9 +390,7 @@ def mock_saver_args(): # A special object for the cube, since cube.attributes must be indexable mock_cube = mock.MagicMock() - args = namedtuple( - "saver_args", ["cube", "filename", "format", "compute"] - )( + args = namedtuple("saver_args", ["cube", "filename", "format", "compute"])( cube=mock_cube, filename=mock.sentinel.filepath, format=mock.sentinel.netcdf4, @@ -440,9 +429,7 @@ def test_compute_true(self, mock_saver_creation, mock_saver_args): # Result should be None assert result is None - def test_compute_false_result_delayed( - self, mock_saver_creation, mock_saver_args - ): + def test_compute_false_result_delayed(self, mock_saver_creation, mock_saver_args): # Check operation when compute=False. 
mock_saver_new, mock_saver = mock_saver_creation args = mock_saver_args diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index c15a721ad3..1f158ce107 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -44,9 +44,7 @@ def test_null(self): self._call_units(units_str="m") self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "m") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) def test_times32(self): with mock.patch("warnings.warn") as warn: @@ -55,9 +53,7 @@ def test_times32(self): ) self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "mm/hr") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_visibility_units(self): @@ -68,9 +64,7 @@ def test_visibility_units(self): ) self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "m") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_power_in_units(self): @@ -80,9 +74,7 @@ def test_power_in_units(self): ) self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "mm") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_ug_per_m3_units(self): @@ -93,9 +85,7 @@ def test_ug_per_m3_units(self): ) self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "ug/m3") - 
self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_g_per_kg(self): @@ -105,9 +95,7 @@ def test_g_per_kg(self): ) self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "kg/kg") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_unit_expection_dictionary(self): @@ -115,9 +103,7 @@ def test_unit_expection_dictionary(self): self._call_units(units_str="mb") self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "hPa") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_per_second(self): @@ -125,9 +111,7 @@ def test_per_second(self): self._call_units(units_str="/s") self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "s^-1") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) def test_unhandled_unit(self): @@ -135,9 +119,7 @@ def test_unhandled_unit(self): self._call_units(units_str="kittens") self.assertEqual(warn.call_count, 1) self.assertEqual(self.cube.units, "") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) + self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) self.assertEqual(self.cube.data.dtype, np.float32) self.assertEqual(self.cube.attributes["invalid_units"], "kittens") diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py 
b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py index 4f1b948839..41f8fdfabb 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py @@ -51,16 +51,12 @@ def _call_vertical_coord( if reference_vertical_coord: self.field.reference_vertical_coord = reference_vertical_coord if reference_vertical_coord_type: - self.field.reference_vertical_coord_type = ( - reference_vertical_coord_type - ) + self.field.reference_vertical_coord_type = reference_vertical_coord_type vertical_coord(self.cube, self.field) def test_unhandled(self): with mock.patch("warnings.warn") as warn: - self._call_vertical_coord( - vertical_coord_val=1.0, vertical_coord_type=-1 - ) + self._call_vertical_coord(vertical_coord_val=1.0, vertical_coord_type=-1) warn.assert_called_once_with( "Vertical coord -1 not yet handled", category=TranslationWarning ) @@ -73,9 +69,7 @@ def test_null(self): def test_ground_level(self): with mock.patch("warnings.warn") as warn: - self._call_vertical_coord( - vertical_coord_val=9999.0, vertical_coord_type=0 - ) + self._call_vertical_coord(vertical_coord_val=9999.0, vertical_coord_type=0) self.assertEqual(warn.call_count, 0) diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index 1a49f57712..de7c2b1ba5 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -100,9 +100,7 @@ def test_masked_mdi_value_warning(self): field = DummyPPField()._ready_for_save() field.bmdi = -123.4 # Make float32 data, as float64 default produces an extra warning. 
- field.data = np.ma.masked_array( - [1.0, field.bmdi, 3.0], dtype=np.float32 - ) + field.data = np.ma.masked_array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): with self.temp_filename(".pp") as temp_filename: @@ -160,9 +158,7 @@ def _check_cs(self, bplat, bplon, rotated): field = DummyPPField() field.bplat = bplat field.bplon = bplon - with mock.patch( - "iris.fileformats.pp.iris.coord_systems" - ) as mock_cs_mod: + with mock.patch("iris.fileformats.pp.iris.coord_systems") as mock_cs_mod: result = field.coord_system() if not rotated: # It should return a standard unrotated CS. diff --git a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py index cca9bb4641..51b5c5732f 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py +++ b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py @@ -25,9 +25,7 @@ def test_single_stash(self): self.assertTrue(pp_filter(stcube)) def test_stash_object(self): - constraint = iris.AttributeConstraint( - STASH=STASH.from_msi("m01s03i236") - ) + constraint = iris.AttributeConstraint(STASH=STASH.from_msi("m01s03i236")) pp_filter = _convert_constraints(constraint) stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) self.assertTrue(pp_filter(stcube)) diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 45635af391..4e2ef616b2 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -25,9 +25,7 @@ @pytest.mark.parametrize( "expected_shape", [(2, 3), (3, 2), (1, 3), (2, 2), (3, 3), (2, 4)] ) -@pytest.mark.parametrize( - "data_type", [np.float32, np.int32, np.int16, np.int8] -) +@pytest.mark.parametrize("data_type", 
[np.float32, np.int32, np.int16, np.int8]) def test_data_padding__no_compression(data_shape, expected_shape, data_type): data = np.empty(data_shape, dtype=data_type) @@ -59,9 +57,7 @@ def test_data_padding__no_compression(data_shape, expected_shape, data_type): _ = pp._data_bytes_to_shaped_array(*args) -class Test__data_bytes_to_shaped_array__lateral_boundary_compression( - tests.IrisTest -): +class Test__data_bytes_to_shaped_array__lateral_boundary_compression(tests.IrisTest): def setUp(self): self.data_shape = 30, 40 y_halo, x_halo, rim = 2, 3, 4 @@ -75,18 +71,12 @@ def setUp(self): y_halo + rim : -(y_halo + rim), x_halo + rim : -(x_halo + rim) ] = True - self.decompressed = ma.masked_array( - decompressed, mask=decompressed_mask - ) + self.decompressed = ma.masked_array(decompressed, mask=decompressed_mask) self.north = decompressed[-(y_halo + rim) :, :] - self.east = decompressed[ - y_halo + rim : -(y_halo + rim), -(x_halo + rim) : - ] + self.east = decompressed[y_halo + rim : -(y_halo + rim), -(x_halo + rim) :] self.south = decompressed[: y_halo + rim, :] - self.west = decompressed[ - y_halo + rim : -(y_halo + rim), : x_halo + rim - ] + self.west = decompressed[y_halo + rim : -(y_halo + rim), : x_halo + rim] # Get the bytes of the north, east, south, west arrays combined. 
buf = io.BytesIO() diff --git a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py index aa03c068ce..dfecd6aa2a 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py +++ b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py @@ -65,7 +65,7 @@ def test_fix_lbrow_lbnpt_no_mask_available(self): warn_msg = warn.call_args[0][0] self.assertTrue( warn_msg.startswith( - "Landmask compressed fields " "existed without a landmask" + "Landmask compressed fields existed without a landmask" ), "Unexpected warning message: {!r}".format(warn_msg), ) @@ -73,9 +73,7 @@ def test_fix_lbrow_lbnpt_no_mask_available(self): def test_deferred_mask_field(self): # Check that the order of the load is yielded last if the mask # hasn't yet been seen. - result = list( - pp._interpret_fields([self.pp_field, self.land_mask_field]) - ) + result = list(pp._interpret_fields([self.pp_field, self.land_mask_field])) self.assertEqual(result, [self.land_mask_field, self.pp_field]) def test_not_deferred_mask_field(self): @@ -124,9 +122,7 @@ def test_landsea_unpacking_uses_dask(self): mask_data_name = mask_toplev_item[1] # Check that the item this refers to is a PPDataProxy. - self.assertIsInstance( - lazy_mask_array.dask[mask_data_name], pp.PPDataProxy - ) + self.assertIsInstance(lazy_mask_array.dask[mask_data_name], pp.PPDataProxy) # Check that the soil-temp graph references the *same* lazy element, # showing that the mask+data calculation is handled by dask. diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index f49d389841..2482501ef4 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -150,9 +150,7 @@ def test_name_units_to_lbfc(self): def test_bad_name_units_to_lbfc_0(self): # Check that badly-formed / unrecognised cases yield LBFC == 0. 
- self.check_cube_name_units_yields_lbfc( - "sea_ice_temperature", "degC", 0 - ) + self.check_cube_name_units_yields_lbfc("sea_ice_temperature", "degC", 0) self.check_cube_name_units_yields_lbfc("Junk_Name", "K", 0) @@ -238,9 +236,7 @@ def test_t1_time_mean(self): tc = cube.coord(axis="t") expected = tc.units.num2date(0) - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual = pp_field.t1 @@ -251,9 +247,7 @@ def test_t1_no_time_mean(self): tc = cube.coord(axis="t") expected = tc.units.num2date(15) - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual = pp_field.t1 @@ -264,9 +258,7 @@ def test_t2_time_mean(self): tc = cube.coord(axis="t") expected = tc.units.num2date(30) - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual = pp_field.t2 @@ -276,9 +268,7 @@ def test_t2_no_time_mean(self): cube = _get_single_time_cube(set_time_mean=False) expected = cftime.datetime(0, 0, 0, calendar=None, has_year_zero=True) - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual = pp_field.t2 self.assertEqual(expected, actual) @@ -289,9 +279,7 @@ def test_lbft_no_forecast_time(self): cube = _get_single_time_cube() mock_lbft = mock.sentinel.lbft - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: pp_field.lbft = mock_lbft verify(cube, pp_field) actual = pp_field.lbft @@ -303,9 +291,7 @@ def test_lbtim_no_time_mean(self): expected_ib = 0 
expected_ic = 2 # 360 day calendar - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual_ib = pp_field.lbtim.ib actual_ic = pp_field.lbtim.ic @@ -318,9 +304,7 @@ def test_lbtim_time_mean(self): expected_ib = 2 # Time mean expected_ic = 2 # 360 day calendar - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual_ib = pp_field.lbtim.ib actual_ic = pp_field.lbtim.ic @@ -332,9 +316,7 @@ def test_lbproc_no_time_mean(self): cube = _get_single_time_cube() expected = 0 - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual = pp_field.lbproc @@ -344,9 +326,7 @@ def test_lbproc_time_mean(self): cube = _get_single_time_cube(set_time_mean=True) expected = 128 - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: verify(cube, pp_field) actual = pp_field.lbproc diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index 591bfda857..3bdbdfb8a6 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -159,9 +159,7 @@ def test_lbcode3x23(self): t1=nc_datetime(2000, 1, 2, hour=0, minute=0, second=0), t2=nc_datetime(2000, 1, 3, hour=0, minute=0, second=0), lbtim=mock.Mock(ia=1, ib=2, ic=2), - lbcode=SplittableInt( - 31323, {"iy": slice(0, 2), "ix": slice(2, 4)} - ), + lbcode=SplittableInt(31323, {"iy": slice(0, 2), "ix": slice(2, 4)}), x_bounds=None, 
y_bounds=time_bounds, _x_coord_name=lambda: "longitude", diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index c87e199956..d9a44fe013 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -14,9 +14,7 @@ import numpy as np -from iris.fileformats.pp_load_rules import ( - _collapse_degenerate_points_and_bounds, -) +from iris.fileformats.pp_load_rules import _collapse_degenerate_points_and_bounds class Test(tests.IrisTest): @@ -76,18 +74,14 @@ def test_multiple_odd_dims(self): def test_bounds_collapse(self): points = np.array([1, 1, 1]) bounds = np.array([[0, 1], [0, 1], [0, 1]]) - result_pts, result_bds = _collapse_degenerate_points_and_bounds( - points, bounds - ) + result_pts, result_bds = _collapse_degenerate_points_and_bounds(points, bounds) self.assertArrayEqual(result_pts, np.array([1])) self.assertArrayEqual(result_bds, np.array([[0, 1]])) def test_bounds_no_collapse(self): points = np.array([1, 1, 1]) bounds = np.array([[0, 1], [0, 1], [0, 2]]) - result_pts, result_bds = _collapse_degenerate_points_and_bounds( - points, bounds - ) + result_pts, result_bds = _collapse_degenerate_points_and_bounds(points, bounds) self.assertArrayEqual(result_pts, points) self.assertArrayEqual(result_bds, bounds) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index 5afaeee45d..4f50d682d5 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -334,12 +334,8 @@ def test(self): lbcode = _lbcode(value=31323) lbtim = _lbtim(ib=2, ic=2) calendar = 
CALENDAR_360_DAY - t1 = nc_datetime( - 1970, 1, 3, hour=0, minute=0, second=0, calendar=calendar - ) - t2 = nc_datetime( - 1970, 1, 4, hour=0, minute=0, second=0, calendar=calendar - ) + t1 = nc_datetime(1970, 1, 3, hour=0, minute=0, second=0, calendar=calendar) + t2 = nc_datetime(1970, 1, 4, hour=0, minute=0, second=0, calendar=calendar) lbft = 24 * 4 coords_and_dims = _convert_time_coords( lbcode=lbcode, @@ -404,10 +400,7 @@ def test_t1_list_t2_scalar(self): lbtim = _lbtim(ia=0, ib=1, ic=1) forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) # Validity time - vector of different values - t1 = [ - nc_datetime(1970, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] + t1 = [nc_datetime(1970, 1, 9, hour=(3 + fp)) for fp in forecast_period_in_hours] t1_dims = (0,) # Forecast reference time - scalar t2 = nc_datetime(1970, 1, 9, hour=3) @@ -454,15 +447,10 @@ def test_t1_and_t2_list(self): lbtim = _lbtim(ia=0, ib=1, ic=1) forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) # Validity time - vector of different values - t1 = [ - nc_datetime(1970, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] + t1 = [nc_datetime(1970, 1, 9, hour=(3 + fp)) for fp in forecast_period_in_hours] t1_dims = (0,) # Forecast reference time - vector of same values - t2 = [ - nc_datetime(1970, 1, 9, hour=3) for _ in forecast_period_in_hours - ] + t2 = [nc_datetime(1970, 1, 9, hour=3) for _ in forecast_period_in_hours] t2_dims = (0,) lbft = None # Not used. @@ -529,16 +517,11 @@ def test_t1_and_t2_orthogonal_lists(self): # Expected coords. 
points = [ - [(year - 1970) * 365 * 24 + 12 - hour for hour in hours] - for year in years + [(year - 1970) * 365 * 24 + 12 - hour for hour in hours] for year in years ] - fp_coord = AuxCoord( - points, standard_name="forecast_period", units="hours" - ) + fp_coord = AuxCoord(points, standard_name="forecast_period", units="hours") points = (years - 1970) * 24 * 365 + (24 * 8) + 12 - time_coord = DimCoord( - points, standard_name="time", units=_EPOCH_HOURS_UNIT - ) + time_coord = DimCoord(points, standard_name="time", units=_EPOCH_HOURS_UNIT) points = (24 * 8) + hours fref_time_coord = DimCoord( points, @@ -562,10 +545,7 @@ def test_t1_multi_dim_list_t2_scalar(self): years = np.array([1970, 1971, 1972]) # Validity time - 2d array of different values t1 = [ - [ - nc_datetime(year, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] + [nc_datetime(year, 1, 9, hour=(3 + fp)) for fp in forecast_period_in_hours] for year in years ] t1_dims = (0, 1) @@ -585,19 +565,13 @@ def test_t1_multi_dim_list_t2_scalar(self): # Expected coords. fp_coord = AuxCoord( - [ - forecast_period_in_hours + (year - 1970) * 365 * 24 - for year in years - ], + [forecast_period_in_hours + (year - 1970) * 365 * 24 for year in years], standard_name="forecast_period", units="hours", ) time_coord = AuxCoord( [ - (24 * 8) - + 3 - + forecast_period_in_hours - + (year - 1970) * 365 * 24 + (24 * 8) + 3 + forecast_period_in_hours + (year - 1970) * 365 * 24 for year in years ], standard_name="time", @@ -624,10 +598,7 @@ def test_t1_and_t2_nparrays(self): forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) # Validity time - vector of different values t1 = np.array( - [ - nc_datetime(1970, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] + [nc_datetime(1970, 1, 9, hour=(3 + fp)) for fp in forecast_period_in_hours] ) t1_dims = (0,) # Forecast reference time - vector of same values @@ -696,9 +667,7 @@ def test_t1_list_t2_scalar(self): # Expected coords. 
points = lbft - (48 - hours) / 2.0 - bounds = np.array( - [lbft - (48 - hours), np.ones_like(hours) * lbft] - ).transpose() + bounds = np.array([lbft - (48 - hours), np.ones_like(hours) * lbft]).transpose() fp_coord = AuxCoord( points, standard_name="forecast_period", @@ -767,9 +736,7 @@ def test_t1_scalar_t2_list(self): bounds=bounds, ) points = (years - 1970) * 365 * 24 + 10 * 24 + 9 + leap_year_adjust - bounds = np.array( - [np.ones_like(points) * (8 * 24 + 9), points] - ).transpose() + bounds = np.array([np.ones_like(points) * (8 * 24 + 9), points]).transpose() # The time coordinate is an AuxCoord as the lower bound for each # cell is the same so it does not meet the monotonicity requirement. time_coord = AuxCoord( diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py index a7ed6355f6..a6a51a750b 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py @@ -325,9 +325,7 @@ def test_cross_section__vector(self): class TestLBVC006_SoilLevel(TestField): - def _check_soil_level( - self, lbcode, lblev=12.3, expect_match=True, dim=None - ): + def _check_soil_level(self, lbcode, lblev=12.3, expect_match=True, dim=None): lbvc = 6 stash = STASH(1, 1, 1) brsvd1, brlev = 0, 0 @@ -479,9 +477,7 @@ def _check_pressure(self, lbcode, blev=250.3, expect_match=True, dim=None): dim=dim, ) if expect_match: - expect_result = [ - (DimCoord(blev, long_name="pressure", units="hPa"), dim) - ] + expect_result = [(DimCoord(blev, long_name="pressure", units="hPa"), dim)] else: expect_result = [] self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) @@ -506,9 +502,7 @@ def test_pressure_cross_section(self): def test_pressure_cross_section__vector(self): blev = np.arange(10) - self._check_pressure( - _lbcode(ix=10, iy=1), blev=blev, dim=1, 
expect_match=False - ) + self._check_pressure(_lbcode(ix=10, iy=1), blev=blev, dim=1, expect_match=False) class TestLBVC019_PotentialTemperature(TestField): @@ -565,9 +559,7 @@ def test_cross_section(self): def test_cross_section__vector(self): blev = np.arange(5) + 100 - self._check_potm( - _lbcode(ix=10, iy=11), blev=blev, dim=1, expect_match=False - ) + self._check_potm(_lbcode(ix=10, iy=11), blev=blev, dim=1, expect_match=False) class TestLBVC009_HybridPressure(TestField): @@ -647,9 +639,7 @@ def _check( ], ) ] - self.assertCoordsAndDimsListsMatch( - coords_and_dims, expect_coords_and_dims - ) + self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_coords_and_dims) self.assertEqual(factories, expect_factories) def test_normal(self): @@ -751,9 +741,7 @@ def _check( ], ) ] - self.assertCoordsAndDimsListsMatch( - coords_and_dims, expect_coords_and_dims - ) + self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_coords_and_dims) self.assertEqual(factories, expect_factories) def test_normal(self): diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py index 2724d45871..af717bb62e 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py @@ -16,9 +16,7 @@ from cf_units import Unit from cftime import datetime as nc_datetime -from iris.fileformats.pp_load_rules import ( - _epoch_date_hours as epoch_hours_call, -) +from iris.fileformats.pp_load_rules import _epoch_date_hours as epoch_hours_call # # Run tests for each of the possible calendars from PPfield.calendar(). 
@@ -43,9 +41,7 @@ def test_ymd_1_1_1(self): self.assertEqual(result, -17259936.0) def test_year_0(self): - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) + test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) self.assertEqual(result, -17268720.0) @@ -95,9 +91,7 @@ def test_ymd_1_1_1(self): self.assertEqual(result, -17012160.0) def test_year_0(self): - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) + test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) self.assertEqual(result, -17020800.0) @@ -123,9 +117,7 @@ def test_ymd_1_1_1(self): self.assertEqual(result, -17248440.0) def test_year_0(self): - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) + test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) self.assertEqual(result, -17257200.0) @@ -141,9 +133,7 @@ def test_bad_calendar(self): # Setup a unit with an unrecognised calendar hrs_unit = Unit("hours since epoch", calendar=self.calendar) # Test against a date with year=0, which requires calendar correction. - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) + test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) # Check that this causes an error. 
with self.assertRaisesRegex(ValueError, "unrecognised calendar"): epoch_hours_call(hrs_unit, test_date) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py index 0b46d11f9d..476ecbc8ae 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py @@ -260,16 +260,11 @@ def base_field(self): @staticmethod def is_forecast_period(coord): - return ( - coord.standard_name == "forecast_period" and coord.units == "hours" - ) + return coord.standard_name == "forecast_period" and coord.units == "hours" @staticmethod def is_time(coord): - return ( - coord.standard_name == "time" - and coord.units == "hours since epoch" - ) + return coord.standard_name == "time" and coord.units == "hours since epoch" def test_time_mean_ib2(self): field = self.base_field() @@ -349,9 +344,7 @@ def test_realization(self): class TestLBSRCE(iris.tests.IrisTest): - def check_um_source_attrs( - self, lbsrce, source_str=None, um_version_str=None - ): + def check_um_source_attrs(self, lbsrce, source_str=None, um_version_str=None): field = _mock_field(lbsrce=lbsrce) ( factories, @@ -374,9 +367,7 @@ def check_um_source_attrs( self.assertNotIn("um_version", attributes) def test_none(self): - self.check_um_source_attrs( - lbsrce=8123, source_str=None, um_version_str=None - ) + self.check_um_source_attrs(lbsrce=8123, source_str=None, um_version_str=None) def test_no_um_version(self): self.check_um_source_attrs( diff --git a/lib/iris/tests/unit/fileformats/rules/test_Loader.py b/lib/iris/tests/unit/fileformats/rules/test_Loader.py index b99d1e6f40..fafa018d3a 100644 --- a/lib/iris/tests/unit/fileformats/rules/test_Loader.py +++ b/lib/iris/tests/unit/fileformats/rules/test_Loader.py @@ -23,9 +23,7 @@ def test_normal(self): ) self.assertEqual(warn.call_count, 0) self.assertIs(loader.field_generator, mock.sentinel.GEN_FUNC) - self.assertIs( - 
loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS - ) + self.assertIs(loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS) self.assertIs(loader.converter, mock.sentinel.CONVERTER) def test_normal_with_explicit_none(self): @@ -37,9 +35,7 @@ def test_normal_with_explicit_none(self): ) self.assertEqual(warn.call_count, 0) self.assertIs(loader.field_generator, mock.sentinel.GEN_FUNC) - self.assertIs( - loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS - ) + self.assertIs(loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS) self.assertIs(loader.converter, mock.sentinel.CONVERTER) diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index 9f5466afaa..685b5fc6d1 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -51,9 +51,7 @@ def test_1d_ones(self): def test_1d_range(self): a = np.arange(6) - self.assertEqual( - self.struct_from_arr(a), ArrayStructure(1, list(range(6))) - ) + self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, list(range(6)))) def test_3d_ones(self): a = np.ones([10, 2, 1]) @@ -61,9 +59,7 @@ def test_3d_ones(self): def test_1d_over_2d_first_dim_manual(self): sub = np.array([10, 10, 20, 20]) - self.assertEqual( - self.struct_from_arr(sub), ArrayStructure(2, [10, 20]) - ) + self.assertEqual(self.struct_from_arr(sub), ArrayStructure(2, [10, 20])) def test_3d_first_dimension(self): flattened = np.array([1, 1, 1, 2, 2, 2]) @@ -161,9 +157,7 @@ def test_single_vector(self): orig = construct_nd(np.array([1, 2]), 0, (2, 1, 3)) flattened = orig.flatten(order=self.order) struct = ArrayStructure.from_array(flattened) - array, dims = struct.nd_array_and_dims( - flattened, (2, 1, 3), order=self.order - ) + array, dims = 
struct.nd_array_and_dims(flattened, (2, 1, 3), order=self.order) self.assertArrayEqual(array, [1, 2]) self.assertEqual(dims, (0,)) @@ -172,9 +166,7 @@ def test_single_vector_3rd_dim(self): flattened = orig.flatten(order=self.order) struct = ArrayStructure.from_array(flattened) - array, dims = struct.nd_array_and_dims( - flattened, (4, 1, 3), order=self.order - ) + array, dims = struct.nd_array_and_dims(flattened, (4, 1, 3), order=self.order) self.assertArrayEqual(array, [1, 2, 3]) self.assertEqual(dims, (2,)) diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index ec98664f51..9eb2e7e8e6 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -26,9 +26,7 @@ def regular_array_structures(shape, names="abcdefg"): running_product = 1 array_structures = {} for name, dim_len in zip(names, shape): - array_structures[name] = ArrayStructure( - running_product, np.arange(dim_len) - ) + array_structures[name] = ArrayStructure(running_product, np.arange(dim_len)) running_product *= dim_len return array_structures @@ -75,8 +73,7 @@ def assert_potentials(self, length, array_structures, expected): structure = GroupStructure(length, array_structures, array_order="f") allowed = structure.possible_structures() names = [ - [name for (name, _) in allowed_structure] - for allowed_structure in allowed + [name for (name, _) in allowed_structure] for allowed_structure in allowed ] self.assertEqual(names, expected) @@ -92,9 +89,7 @@ def test_alternate_potentials(self): # More than one potential dimension for dim 1. 
array_structures = regular_array_structures((4, 2, 3)) array_structures.update(regular_array_structures((6, 4), names="xy")) - self.assert_potentials( - 24, array_structures, [["x", "y"], ["a", "b", "c"]] - ) + self.assert_potentials(24, array_structures, [["x", "y"], ["a", "b", "c"]]) def test_shared_first_dimension(self): # One 2d potential as well as one 3d, using the same first dimension. @@ -140,9 +135,7 @@ def test_build_arrays_regular_f_order(self): result = grp.build_arrays((2, 3), {"a": a, "b": b}) self.assert_built_array("a", result, ([0, 1], (0,))) - self.assert_built_array( - "b", result, ([[0, 100], [1, 101], [2, 102]], (1,)) - ) + self.assert_built_array("b", result, ([[0, 100], [1, 101], [2, 102]], (1,))) def test_build_arrays_unstructured(self): # Check that an unstructured array gets reshaped appropriately. @@ -177,9 +170,7 @@ def test_structured_array_not_applicable(self): elements["d"] = ArrayStructure(3, np.arange(4)) grp = GroupStructure(12, elements, array_order="f") - d = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]).reshape( - (3, 4), order="f" - ) + d = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]).reshape((3, 4), order="f") expected = np.array([[[0, 1, 2], [0, 2, 3]], [[0, 1, 3], [1, 2, 3]]]) r = grp.build_arrays( (2, 2, 3), diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/test_rules.py index b7e17b205e..3f271091af 100644 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ b/lib/iris/tests/unit/fileformats/test_rules.py @@ -116,14 +116,11 @@ def field_generator(filename): factory = mock.Mock() factory.args = [{"name": "foo"}] factory.factory_class = ( - lambda *args: setattr(aux_factory, "fake_args", args) - or aux_factory + lambda *args: setattr(aux_factory, "fake_args", args) or aux_factory ) def converter(field): - return ConversionMetadata( - [factory], [], "", "", "", {}, [], [], [] - ) + return ConversionMetadata([factory], [], "", "", "", {}, [], [], []) # Finish by making a fake 
Loader fake_loader = Loader(field_generator, {}, converter) @@ -196,9 +193,7 @@ def field_generator(filename): def converter(field): if field is press_field: src = param_cube - factories = [ - Factory(HybridHeightFactory, [Reference("orography")]) - ] + factories = [Factory(HybridHeightFactory, [Reference("orography")])] references = [] else: src = orog_cube diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py index 930050813f..24b438f76f 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py @@ -43,24 +43,16 @@ def fast_load_callback(cube, collation, filename): def test_callback_collations_filepaths(self): self.assertEqual(len(self.callback_collations), 2) - self.assertEqual( - self.callback_collations[0].data_filepath, self.test_filepath - ) - self.assertEqual( - self.callback_collations[1].data_filepath, self.test_filepath - ) + self.assertEqual(self.callback_collations[0].data_filepath, self.test_filepath) + self.assertEqual(self.callback_collations[1].data_filepath, self.test_filepath) def test_callback_collations_field_indices(self): - self.assertEqual( - self.callback_collations[0].data_field_indices.dtype, np.int64 - ) + self.assertEqual(self.callback_collations[0].data_field_indices.dtype, np.int64) self.assertArrayEqual( self.callback_collations[0].data_field_indices, [[1, 3], [5, 7]] ) - self.assertEqual( - self.callback_collations[1].data_field_indices.dtype, np.int64 - ) + self.assertEqual(self.callback_collations[1].data_field_indices.dtype, np.int64) self.assertArrayEqual( self.callback_collations[1].data_field_indices, [[0, 2], [4, 6]] ) diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py index f6e3fd5928..6d2c95eaff 100644 --- 
a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py @@ -19,9 +19,7 @@ import iris.coords import iris.fileformats.pp import iris.fileformats.rules -from iris.fileformats.um._fast_load import ( - _convert_collation as convert_collation, -) +from iris.fileformats.um._fast_load import _convert_collation as convert_collation COORD_SYSTEM = iris.coord_systems.GeogCS(6371229.0) LATITUDE = iris.coords.DimCoord( @@ -117,9 +115,7 @@ def test_vector_t1(self): (LONGITUDE, 2), (LATITUDE, 1), ( - iris.coords.DimCoord( - [18, 24, 30], "time", units="hours since epoch" - ), + iris.coords.DimCoord([18, 24, 30], "time", units="hours since epoch"), (0,), ), ] @@ -132,9 +128,7 @@ def test_vector_t1(self): None, ), ( - iris.coords.DimCoord( - [6, 12, 18], "forecast_period", units="hours" - ), + iris.coords.DimCoord([6, 12, 18], "forecast_period", units="hours"), (0,), ), ] @@ -178,9 +172,7 @@ def test_vector_t2(self): None, ), ( - iris.coords.DimCoord( - [6, 3, 0.0], "forecast_period", units="hours" - ), + iris.coords.DimCoord([6, 3, 0.0], "forecast_period", units="hours"), (0,), ), ] @@ -256,9 +248,7 @@ def test_vector_t1_and_t2(self): (LONGITUDE, 3), (LATITUDE, 2), ( - iris.coords.DimCoord( - [30, 33, 36], "time", units="hours since epoch" - ), + iris.coords.DimCoord([30, 33, 36], "time", units="hours since epoch"), (1,), ), ( diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py index 19c64ec57a..e3e22b94e1 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py @@ -17,9 +17,7 @@ from iris._lazy_data import as_lazy_data import iris.fileformats.pp -from iris.fileformats.um._fast_load_structured_fields 
import ( - BasicFieldCollation, -) +from iris.fileformats.um._fast_load_structured_fields import BasicFieldCollation class Test___init__(tests.IrisTest): @@ -35,9 +33,7 @@ def test_preserve_members(self): self.assertEqual(collation.fields, fields) -def _make_field( - lbyr=None, lbyrd=None, lbft=None, blev=None, bhlev=None, data=None -): +def _make_field(lbyr=None, lbyrd=None, lbft=None, blev=None, bhlev=None, data=None): header = [0] * 64 if lbyr is not None: header[0] = lbyr @@ -111,9 +107,7 @@ def test_t1(self): result = collation.element_arrays_and_dims self.assertEqual(list(result.keys()), ["t1"]) values, dims = result["t1"] - self.assertArrayEqual( - values, [datetime(2013, 1, 1), datetime(2014, 1, 1)] - ) + self.assertArrayEqual(values, [datetime(2013, 1, 1), datetime(2014, 1, 1)]) self.assertEqual(dims, (0,)) def test_t1_and_t2(self): @@ -167,13 +161,9 @@ def test_t1_and_t2_and_lbft(self): self.assertEqual(dims, (1,)) def test_blev(self): - collation = BasicFieldCollation( - [_make_field(blev=1), _make_field(blev=2)] - ) + collation = BasicFieldCollation([_make_field(blev=1), _make_field(blev=2)]) result = collation.element_arrays_and_dims - keys = set( - ["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"] - ) + keys = set(["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"]) self.assertEqual(set(result.keys()), keys) values, dims = result["blev"] self.assertArrayEqual(values, [1, 2]) @@ -184,9 +174,7 @@ def test_bhlev(self): [_make_field(blev=0, bhlev=1), _make_field(blev=1, bhlev=2)] ) result = collation.element_arrays_and_dims - keys = set( - ["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"] - ) + keys = set(["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"]) self.assertEqual(set(result.keys()), keys) values, dims = result["bhlev"] self.assertArrayEqual(values, [1, 2]) diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py 
b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py index 75b54dfd4f..3b586434b6 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py @@ -14,9 +14,7 @@ from unittest import mock -from iris.fileformats.um._fast_load_structured_fields import ( - group_structured_fields, -) +from iris.fileformats.um._fast_load_structured_fields import group_structured_fields def _convert_to_vector(value, length, default): @@ -108,25 +106,17 @@ def test_2d_combines(self): stashes=[11, 11, 15, 11], lbprocs=[31, 42, 31, 42] ) result = self._group_result(fields_iter) - self.assertEqual( - result, self._test_fields([(1001,), (1002, 1004), (1003,)]) - ) + self.assertEqual(result, self._test_fields([(1001,), (1002, 1004), (1003,)])) def test_sortorder(self): fields_iter = self._dummy_fields_iter(stashes=[11, 7, 12]) result = self._group_result(fields_iter) - self.assertEqual( - result, self._test_fields([(1002,), (1001,), (1003,)]) - ) + self.assertEqual(result, self._test_fields([(1002,), (1001,), (1003,)])) def test_sortorder_2d(self): - fields_iter = self._dummy_fields_iter( - stashes=[11, 11, 12], lbprocs=[31, 9, 1] - ) + fields_iter = self._dummy_fields_iter(stashes=[11, 11, 12], lbprocs=[31, 9, 1]) result = self._group_result(fields_iter) - self.assertEqual( - result, self._test_fields([(1002,), (1001,), (1003,)]) - ) + self.assertEqual(result, self._test_fields([(1002,), (1001,), (1003,)])) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index 92a8b19ec0..cf545e89d5 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ 
b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -14,9 +14,7 @@ import numpy as np -from iris.fileformats.um._optimal_array_structuring import ( - optimal_array_structure, -) +from iris.fileformats.um._optimal_array_structuring import optimal_array_structure class Test__optimal_dimensioning_structure: @@ -33,17 +31,13 @@ def _check_arrays_and_dims(self, result, spec): result_dims, spec_dims, 'element dims differ for "{}": ' - "result={!r}, expected {!r}".format( - keyname, result_dims, spec_dims - ), + "result={!r}, expected {!r}".format(keyname, result_dims, spec_dims), ) self.assertArrayEqual( result_array, spec_array, 'element arrays differ for "{}": ' - "result={!r}, expected {!r}".format( - keyname, result_array, spec_array - ), + "result={!r}, expected {!r}".format(keyname, result_array, spec_array), ) def test_none(self): @@ -63,9 +57,7 @@ def test_1d(self): shape, primaries, elems_and_dims = optimal_array_structure(elements) self.assertEqual(shape, (3,)) self.assertEqual(primaries, set("a")) - self._check_arrays_and_dims( - elems_and_dims, {"a": (np.array([1, 2, 4]), (0,))} - ) + self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([1, 2, 4]), (0,))}) def test_1d_actuals(self): # Test use of alternate element values for array construction. 
@@ -76,9 +68,7 @@ def test_1d_actuals(self): ) self.assertEqual(shape, (3,)) self.assertEqual(primaries, set("a")) - self._check_arrays_and_dims( - elems_and_dims, {"a": (np.array([7, 3, 9]), (0,))} - ) + self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([7, 3, 9]), (0,))}) def test_actuals_mismatch_fail(self): elements = [("a", np.array([1, 2, 4]))] @@ -144,9 +134,7 @@ def test_degenerate(self): shape, primaries, elems_and_dims = optimal_array_structure(elements) self.assertEqual(shape, (3,)) self.assertEqual(primaries, set(["a"])) - self._check_arrays_and_dims( - elems_and_dims, {"a": (np.array([1, 2, 3]), (0,))} - ) + self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([1, 2, 3]), (0,))}) def test_1d_duplicates(self): # When two have the same structure, the first is 'the dimension'. diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py index 40ac6826d3..c6256ab015 100644 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py @@ -33,9 +33,7 @@ def test__call(self): # Call um_to_pp while patching the um._ff_replacement.FF2PP class. test_path = "/any/old/file.name" - with mock.patch( - "iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class - ): + with mock.patch("iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class): result = um_to_pp(test_path) # Check that it called FF2PP in the expected way. @@ -43,9 +41,7 @@ def test__call(self): mock_ff2pp_class.call_args_list, [mock.call("/any/old/file.name", read_data=False)], ) - self.assertEqual( - mock_ff2pp_instance.__iter__.call_args_list, [mock.call()] - ) + self.assertEqual(mock_ff2pp_instance.__iter__.call_args_list, [mock.call()]) # Check that it returned the expected result. 
self.assertIs(result, mock_iterator) diff --git a/lib/iris/tests/unit/io/test_expand_filespecs.py b/lib/iris/tests/unit/io/test_expand_filespecs.py index bd5e5933a3..567fde4e79 100644 --- a/lib/iris/tests/unit/io/test_expand_filespecs.py +++ b/lib/iris/tests/unit/io/test_expand_filespecs.py @@ -44,9 +44,7 @@ def test_relative_path(self): try: os.chdir(self.tmpdir) item_out = iio.expand_filespecs(["*"]) - item_in = [ - os.path.join(self.tmpdir, fname) for fname in self.fnames - ] + item_in = [os.path.join(self.tmpdir, fname) for fname in self.fnames] self.assertEqual(item_out, item_in) finally: os.chdir(cwd) @@ -61,9 +59,7 @@ def test_return_order(self): os.path.join(self.tmpdir, "a.*"), os.path.join(self.tmpdir, "b.*"), ] - expected = [ - os.path.join(self.tmpdir, fname) for fname in ["a.foo", "b.txt"] - ] + expected = [os.path.join(self.tmpdir, fname) for fname in ["a.foo", "b.txt"]] result = iio.expand_filespecs(patterns) self.assertEqual(result, expected) result = iio.expand_filespecs(patterns[::-1]) diff --git a/lib/iris/tests/unit/io/test_run_callback.py b/lib/iris/tests/unit/io/test_run_callback.py index cd55743b29..8711f49fbd 100644 --- a/lib/iris/tests/unit/io/test_run_callback.py +++ b/lib/iris/tests/unit/io/test_run_callback.py @@ -21,9 +21,7 @@ def setUp(self): def test_no_callback(self): # No callback results in the cube being returned. - self.assertEqual( - iris.io.run_callback(None, self.cube, None, None), self.cube - ) + self.assertEqual(iris.io.run_callback(None, self.cube, None, None), self.cube) def test_ignore_cube(self): # Ignore cube should result in None being returned. 
@@ -31,9 +29,7 @@ def callback(cube, field, fname): raise iris.exceptions.IgnoreCubeException() cube = self.cube - self.assertEqual( - iris.io.run_callback(callback, cube, None, None), None - ) + self.assertEqual(iris.io.run_callback(callback, cube, None, None), None) def test_callback_no_return(self): # Check that a callback not returning anything still results in the @@ -42,9 +38,7 @@ def callback(cube, field, fname): pass cube = self.cube - self.assertEqual( - iris.io.run_callback(callback, cube, None, None), cube - ) + self.assertEqual(iris.io.run_callback(callback, cube, None, None), cube) def test_bad_callback_return_type(self): # Check that a TypeError is raised with a bad callback return value. @@ -52,7 +46,7 @@ def callback(cube, field, fname): return iris.cube.CubeList() with self.assertRaisesRegex( - TypeError, "Callback function returned an " "unhandled data type." + TypeError, "Callback function returned an unhandled data type." ): iris.io.run_callback(callback, None, None, None) diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 2222d185c3..0f741b10a3 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -87,9 +87,7 @@ def test_chunk_size_limiting(self): ] err_fmt = "Result of optimising chunks {} was {}, expected {}" for shape, expected in given_shapes_and_resulting_chunks: - chunks = _optimum_chunksize( - shape, shape, limit=self.FIXED_CHUNKSIZE_LIMIT - ) + chunks = _optimum_chunksize(shape, shape, limit=self.FIXED_CHUNKSIZE_LIMIT) msg = err_fmt.format(shape, chunks, expected) self.assertEqual(chunks, expected, msg) @@ -146,17 +144,13 @@ def test_chunk_expanding_equal_division(self): result = _optimum_chunksize( chunks=chunks, shape=shape, limit=limit, dtype=np.dtype("b1") ) - msg = err_fmt_main.format( - chunks, shape, limit, result, expected_result - ) + msg = err_fmt_main.format(chunks, shape, limit, 
result, expected_result) self.assertEqual(result, expected_result, msg) def test_default_chunksize(self): # Check that the "ideal" chunksize is taken from the dask config. with dask.config.set({"array.chunk-size": "20b"}): - chunks = _optimum_chunksize( - (1, 8), shape=(400, 20), dtype=np.dtype("f4") - ) + chunks = _optimum_chunksize((1, 8), shape=(400, 20), dtype=np.dtype("f4")) self.assertEqual(chunks, (1, 4)) def test_default_chunks_limiting(self): diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py index 1c694d292b..7403a5611e 100644 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py @@ -38,9 +38,7 @@ def setUp(self): def test_non_lazy_input(self): # Check that a non-lazy input doesn't trip up the functionality. cube, cube_data = create_mock_cube(self.array) - result = map_complete_blocks( - cube, self.func, dims=(1,), out_sizes=(4,) - ) + result = map_complete_blocks(cube, self.func, dims=(1,), out_sizes=(4,)) self.assertFalse(is_lazy_data(result)) self.assertArrayEqual(result, self.func_result) # check correct data was accessed @@ -50,9 +48,7 @@ def test_non_lazy_input(self): def test_lazy_input(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) cube, cube_data = create_mock_cube(lazy_array) - result = map_complete_blocks( - cube, self.func, dims=(1,), out_sizes=(4,) - ) + result = map_complete_blocks(cube, self.func, dims=(1,), out_sizes=(4,)) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), self.func_result) # check correct data was accessed @@ -61,18 +57,14 @@ def test_lazy_input(self): def test_dask_array_input(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) - result = map_complete_blocks( - lazy_array, self.func, dims=(1,), out_sizes=(4,) - ) + result = map_complete_blocks(lazy_array, self.func, dims=(1,), out_sizes=(4,)) 
self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), self.func_result) def test_rechunk(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (2, 2))) cube, _ = create_mock_cube(lazy_array) - result = map_complete_blocks( - cube, self.func, dims=(1,), out_sizes=(4,) - ) + result = map_complete_blocks(cube, self.func, dims=(1,), out_sizes=(4,)) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), self.func_result) @@ -92,9 +84,7 @@ def test_multidimensional_input(self): array = np.arange(2 * 3 * 4).reshape(2, 3, 4) lazy_array = da.asarray(array, chunks=((1, 1), (1, 2), (4,))) cube, _ = create_mock_cube(lazy_array) - result = map_complete_blocks( - cube, self.func, dims=(1, 2), out_sizes=(3, 4) - ) + result = map_complete_blocks(cube, self.func, dims=(1, 2), out_sizes=(3, 4)) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), array + 1) diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py index 80f135e108..6174e6a9d8 100644 --- a/lib/iris/tests/unit/merge/test_ProtoCube.py +++ b/lib/iris/tests/unit/merge/test_ProtoCube.py @@ -304,33 +304,21 @@ def test_scalar_defns_both_extra(self): self.check_fail("aux_coords (scalar)", "latitude", "longitude") def test_vector_dim_coords_and_dims_one_extra(self): - self.cube2.add_dim_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) + self.cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="latitude"), 0) self.check_fail("dim_coords", "latitude") def test_vector_dim_coords_and_dims_both_extra(self): - self.cube2.add_dim_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], standard_name="longitude"), 0 - ) + self.cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="latitude"), 0) + self.cube1.add_dim_coord(DimCoord([1, 2, 3], standard_name="longitude"), 0) self.check_fail("dim_coords", "latitude", "longitude") def 
test_vector_aux_coords_and_dims_one_extra(self): - self.cube2.add_aux_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) + self.cube2.add_aux_coord(DimCoord([1, 2, 3], standard_name="latitude"), 0) self.check_fail("aux_coords (non-scalar)", "latitude") def test_vector_aux_coords_and_dims_both_extra(self): - self.cube2.add_aux_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) - self.cube1.add_aux_coord( - DimCoord([1, 2, 3], standard_name="longitude"), 0 - ) + self.cube2.add_aux_coord(DimCoord([1, 2, 3], standard_name="latitude"), 0) + self.cube1.add_aux_coord(DimCoord([1, 2, 3], standard_name="longitude"), 0) self.check_fail("aux_coords (non-scalar)", "latitude", "longitude") def test_factory_defns_one_extra(self): @@ -367,15 +355,9 @@ def test_noise(self): cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="longitude"), 1) cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="altitude"), 2) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], long_name="equinimity"), 0 - ) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], long_name="equinomity"), 1 - ) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], long_name="equinumity"), 2 - ) + self.cube1.add_dim_coord(DimCoord([1, 2, 3], long_name="equinimity"), 0) + self.cube1.add_dim_coord(DimCoord([1, 2, 3], long_name="equinomity"), 1) + self.cube1.add_dim_coord(DimCoord([1, 2, 3], long_name="equinumity"), 2) # dim cube2.add_aux_coord(DimCoord([1, 2, 3], var_name="one"), 0) @@ -408,9 +390,7 @@ def test_nochange(self): proto_cube.register(self.cube2, error_on_mismatch=True) def _props_fail(self, *terms): - self.check_fail( - self._mergetest_type, self.coord_to_change.name(), *terms - ) + self.check_fail(self._mergetest_type, self.coord_to_change.name(), *terms) def test_standard_name(self): self.coord_to_change.standard_name = "soil_temperature" diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index 6f617439db..c0785459ca 100644 --- 
a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -165,9 +165,7 @@ class TestAsDataFrame(tests.IrisTest): """Test conversion of 2D cubes to Pandas using as_data_frame()""" def test_no_dim_coords(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) + cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo") expected_index = [0, 1] expected_columns = [0, 1, 2, 3, 4] data_frame = iris.pandas.as_data_frame(cube) @@ -176,9 +174,7 @@ def test_no_dim_coords(self): self.assertArrayEqual(data_frame.columns, expected_columns) def test_no_x_coord(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) + cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo") y_coord = DimCoord([10, 11], long_name="bar") cube.add_dim_coord(y_coord, 0) expected_index = [10, 11] @@ -189,9 +185,7 @@ def test_no_x_coord(self): self.assertArrayEqual(data_frame.columns, expected_columns) def test_no_y_coord(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) + cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo") x_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar") cube.add_dim_coord(x_coord, 1) expected_index = [0, 1] @@ -202,9 +196,7 @@ def test_no_y_coord(self): self.assertArrayEqual(data_frame.columns, expected_columns) def test_simple(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) + cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo") x_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar") y_coord = DimCoord([15, 16], long_name="milk") cube.add_dim_coord(x_coord, 1) @@ -230,9 +222,7 @@ def test_masked(self): self.assertArrayEqual(data_frame.columns, expected_columns) def test_time_standard(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts" - ) + cube = Cube(np.array([[0, 1, 2, 3, 4], 
[5, 6, 7, 8, 9]]), long_name="ts") day_offsets = [0, 100.1, 200.2, 300.3, 400.4] time_coord = DimCoord( day_offsets, long_name="time", units="days since 2000-01-01 00:00" @@ -244,24 +234,18 @@ def test_time_standard(self): days_to_2000 = 365 * 30 + 7 # pandas Timestamp class cannot handle floats in pandas 1: - index = pandas.MultiIndex.from_product( - index_values, names=index_names - ) + index = pandas.MultiIndex.from_product(index_values, names=index_names) data_length = index.nunique() else: index = None @@ -982,9 +846,7 @@ def test_1d_series_with_index(self): series = self._create_pandas(index_levels=1, is_series=True) result = iris.pandas.as_cubes(series) - expected_coord = DimCoord( - series.index.values, long_name=series.index.name - ) + expected_coord = DimCoord(series.index.values, long_name=series.index.name) (result_cube,) = result assert result_cube.dim_coords == (expected_coord,) @@ -993,8 +855,7 @@ def test_3d(self): result = iris.pandas.as_cubes(df) expected_coords = [ - DimCoord(level.values, long_name=level.name) - for level in df.index.levels + DimCoord(level.values, long_name=level.name) for level in df.index.levels ] (result_cube,) = result assert result_cube.dim_coords == tuple(expected_coords) @@ -1043,9 +904,7 @@ def test_aux_coord(self): df[coord_name] = df.index.values result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name]) - expected_aux_coord = AuxCoord( - df[coord_name].values, long_name=coord_name - ) + expected_aux_coord = AuxCoord(df[coord_name].values, long_name=coord_name) (result_cube,) = result assert result_cube.aux_coords == (expected_aux_coord,) @@ -1065,9 +924,7 @@ def test_ancillary_variable(self): df[coord_name] = df.index.values result = iris.pandas.as_cubes(df, ancillary_variable_cols=[coord_name]) - expected_av = AncillaryVariable( - df[coord_name].values, long_name=coord_name - ) + expected_av = AncillaryVariable(df[coord_name].values, long_name=coord_name) (result_cube,) = result assert 
result_cube.ancillary_variables() == [expected_av] @@ -1279,9 +1136,7 @@ def test_coord_never_view(self): df = self._create_pandas() coord_name = "foo" df[coord_name] = df.index.values - result = iris.pandas.as_cubes( - df, copy=False, aux_coord_cols=[coord_name] - ) + result = iris.pandas.as_cubes(df, copy=False, aux_coord_cols=[coord_name]) # Modify AFTER creating the Cube(s). df[coord_name][0] += 1 @@ -1316,9 +1171,7 @@ def _test_dates_common(self, mode=None, alt_calendar=False): df[coord_name] = values kwargs["aux_coord_cols"] = [coord_name] elif mode == "cftime": - values = [ - cftime.datetime(*a, calendar=calendar) for a in datetime_args - ] + values = [cftime.datetime(*a, calendar=calendar) for a in datetime_args] df[coord_name] = values kwargs["aux_coord_cols"] = [coord_name] else: diff --git a/lib/iris/tests/unit/plot/__init__.py b/lib/iris/tests/unit/plot/__init__.py index 7481cdafa3..5bd4f4785b 100644 --- a/lib/iris/tests/unit/plot/__init__.py +++ b/lib/iris/tests/unit/plot/__init__.py @@ -18,9 +18,7 @@ class TestGraphicStringCoord(tests.GraphicsTest): def setUp(self): super().setUp() self.cube = simple_2d(with_bounds=True) - self.cube.add_aux_coord( - AuxCoord(list("abcd"), long_name="str_coord"), 1 - ) + self.cube.add_aux_coord(AuxCoord(list("abcd"), long_name="str_coord"), 1) self.lat_lon_cube = lat_lon_cube() def tick_loc_and_label(self, axis_name, axes=None): diff --git a/lib/iris/tests/unit/plot/_blockplot_common.py b/lib/iris/tests/unit/plot/_blockplot_common.py index e3e88304fa..04a7d8866f 100644 --- a/lib/iris/tests/unit/plot/_blockplot_common.py +++ b/lib/iris/tests/unit/plot/_blockplot_common.py @@ -54,9 +54,7 @@ def test_geoaxes_exception(self): fig = plt.figure() ax = fig.add_subplot(111) - self.assertRaises( - TypeError, self.blockplot_func(), self.lat_lon_cube, axes=ax - ) + self.assertRaises(TypeError, self.blockplot_func(), self.lat_lon_cube, axes=ax) plt.close(fig) diff --git 
a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py index 3e25f0aadb..b336c429bc 100644 --- a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py +++ b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py @@ -16,10 +16,7 @@ from iris.coords import DimCoord from iris.plot import _check_bounds_contiguity_and_mask -from iris.tests.stock import ( - make_bounds_discontiguous_at_point, - sample_2d_latlons, -) +from iris.tests.stock import make_bounds_discontiguous_at_point, sample_2d_latlons @tests.skip_plot @@ -91,9 +88,7 @@ def test_2d_discontigous_unmasked(self): msg = "coordinate are not contiguous" cube.data[3, 4] = ma.nomask with self.assertRaisesRegex(ValueError, msg): - _check_bounds_contiguity_and_mask( - cube.coord("longitude"), cube.data - ) + _check_bounds_contiguity_and_mask(cube.coord("longitude"), cube.data) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py index cfbb15cdef..1ff7195afb 100644 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -31,8 +31,7 @@ def setUp(self): # Expected arrays if conversion takes place. 
self.x_converted, self.y_converted = ( - i * geostationary_altitude - for i in (self.x_original, self.y_original) + i * geostationary_altitude for i in (self.x_original, self.y_original) ) def _test(self, geostationary=True): diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn.py b/lib/iris/tests/unit/plot/test__get_plot_defn.py index 512dc7f0b2..4032c8792d 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn.py @@ -20,24 +20,18 @@ class Test_get_plot_defn(tests.IrisTest): def test_axis_order_xy(self): cube_xy = simple_2d() defn = iplt._get_plot_defn(cube_xy, iris.coords.POINT_MODE) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) + self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) def test_axis_order_yx(self): cube_yx = simple_2d() cube_yx.transpose() defn = iplt._get_plot_defn(cube_yx, iris.coords.POINT_MODE) - self.assertEqual( - [coord.name() for coord in defn.coords], ["foo", "bar"] - ) + self.assertEqual([coord.name() for coord in defn.coords], ["foo", "bar"]) def test_2d_coords(self): cube = simple_2d_w_multidim_coords() defn = iplt._get_plot_defn(cube, iris.coords.BOUND_MODE) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) + self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py index dcd8fac9e1..fb6de798e8 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py @@ -10,11 +10,7 @@ import iris.tests as tests # isort:skip from iris.coords import BOUND_MODE, POINT_MODE -from iris.tests.stock import ( - hybrid_height, - simple_2d, - simple_2d_w_multidim_coords, -) +from iris.tests.stock import hybrid_height, 
simple_2d, simple_2d_w_multidim_coords if tests.MPL_AVAILABLE: import iris.plot as iplt @@ -27,9 +23,7 @@ def test_1d_coords(self): defn = iplt._get_plot_defn_custom_coords_picked( cube, ("foo", "bar"), POINT_MODE ) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) + self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) self.assertFalse(defn.transpose) def test_1d_coords_swapped(self): @@ -37,24 +31,18 @@ def test_1d_coords_swapped(self): defn = iplt._get_plot_defn_custom_coords_picked( cube, ("bar", "foo"), POINT_MODE ) - self.assertEqual( - [coord.name() for coord in defn.coords], ["foo", "bar"] - ) + self.assertEqual([coord.name() for coord in defn.coords], ["foo", "bar"]) self.assertTrue(defn.transpose) def test_1d_coords_as_integers(self): cube = simple_2d() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, (1, 0), POINT_MODE - ) + defn = iplt._get_plot_defn_custom_coords_picked(cube, (1, 0), POINT_MODE) self.assertEqual([coord for coord in defn.coords], [0, 1]) self.assertFalse(defn.transpose) def test_1d_coords_as_integers_swapped(self): cube = simple_2d() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, (0, 1), POINT_MODE - ) + defn = iplt._get_plot_defn_custom_coords_picked(cube, (0, 1), POINT_MODE) self.assertEqual([coord for coord in defn.coords], [1, 0]) self.assertTrue(defn.transpose) @@ -63,16 +51,12 @@ def test_2d_coords(self): defn = iplt._get_plot_defn_custom_coords_picked( cube, ("foo", "bar"), BOUND_MODE ) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) + self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) self.assertFalse(defn.transpose) def test_2d_coords_as_integers(self): cube = simple_2d_w_multidim_coords() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, (0, 1), BOUND_MODE - ) + defn = iplt._get_plot_defn_custom_coords_picked(cube, (0, 1), BOUND_MODE) self.assertEqual([coord for coord in defn.coords], [1, 
0]) self.assertTrue(defn.transpose) diff --git a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py index 8d4054b35a..cac42bb765 100644 --- a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py +++ b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py @@ -32,9 +32,7 @@ def test_preserve_position(self): # get_position returns mpl.transforms.Bbox object, for which equality does # not appear to be implemented. Compare the bounds (tuple) instead. - self.assertEqual( - expected.get_position().bounds, result.get_position().bounds - ) + self.assertEqual(expected.get_position().bounds, result.get_position().bounds) def test_ax_on_subfigure(self): subfig, _ = self.fig.subfigures(nrows=2) diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index db33862a7e..26890175b8 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -61,9 +61,7 @@ def test_plot_longitude(self): fig = plt.figure() ax = fig.add_subplot(111) - iplt.plot( - self.lat_lon_cube.coord("longitude"), self.lat_lon_cube, axes=ax - ) + iplt.plot(self.lat_lon_cube.coord("longitude"), self.lat_lon_cube, axes=ax) plt.close(fig) @@ -84,12 +82,8 @@ def lon_lat_coords(self, lons, lats, cs=None): if cs is None: cs = self.geog_cs return ( - coords.AuxCoord( - lons, "longitude", units="degrees", coord_system=cs - ), - coords.AuxCoord( - lats, "latitude", units="degrees", coord_system=cs - ), + coords.AuxCoord(lons, "longitude", units="degrees", coord_system=cs), + coords.AuxCoord(lats, "latitude", units="degrees", coord_system=cs), ) def assertPathsEqual(self, expected, actual): diff --git a/lib/iris/tests/unit/plot/test_scatter.py b/lib/iris/tests/unit/plot/test_scatter.py index f3b2ec1592..21412010ab 100644 --- a/lib/iris/tests/unit/plot/test_scatter.py +++ b/lib/iris/tests/unit/plot/test_scatter.py @@ -53,9 +53,7 @@ def 
test_scatter_longitude(self): fig = plt.figure() ax = fig.add_subplot(111) - iplt.scatter( - self.lat_lon_cube, self.lat_lon_cube.coord("longitude"), axes=ax - ) + iplt.scatter(self.lat_lon_cube, self.lat_lon_cube.coord("longitude"), axes=ax) plt.close(fig) diff --git a/lib/iris/tests/unit/quickplot/test_pcolor.py b/lib/iris/tests/unit/quickplot/test_pcolor.py index 79f6904e12..fc2ce83f0b 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolor.py +++ b/lib/iris/tests/unit/quickplot/test_pcolor.py @@ -41,9 +41,7 @@ def setUp(self): self.bar_index = np.arange(coord.points.size + 1) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch( - "matplotlib.pyplot.pcolor", return_value=None - ) + self.mpl_patch = self.patch("matplotlib.pyplot.pcolor", return_value=None) self.draw_func = qplt.pcolor diff --git a/lib/iris/tests/unit/quickplot/test_pcolormesh.py b/lib/iris/tests/unit/quickplot/test_pcolormesh.py index 826f0e7121..6ce9d07406 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolormesh.py +++ b/lib/iris/tests/unit/quickplot/test_pcolormesh.py @@ -41,9 +41,7 @@ def setUp(self): self.bar_index = np.arange(coord.points.size + 1) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch( - "matplotlib.pyplot.pcolormesh", return_value=None - ) + self.mpl_patch = self.patch("matplotlib.pyplot.pcolormesh", return_value=None) self.draw_func = qplt.pcolormesh diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py index 20d5c47e01..d7b617d848 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py @@ -9,13 +9,7 @@ from iris._representation.cube_printout import CubePrinter from iris._representation.cube_summary import CubeSummary -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - 
DimCoord, -) +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, CellMethod, DimCoord from iris.cube import Cube from iris.tests.stock.mesh import sample_mesh_cube @@ -114,9 +108,7 @@ def test_name_padding(self): def test_columns_long_coordname(self): cube = Cube([0], long_name="short", units=1) - coord = AuxCoord( - [0], long_name="very_very_very_very_very_long_coord_name" - ) + coord = AuxCoord([0], long_name="very_very_very_very_very_long_coord_name") cube.add_aux_coord(coord, 0) rep = cube_replines(cube) expected = [ @@ -151,19 +143,11 @@ def test_coord_distinguishing_attributes(self): # include : vector + scalar cube = Cube([0, 1], long_name="name", units=1) # Add a pair of vector coords with same name but different attributes. - cube.add_aux_coord( - AuxCoord([0, 1], long_name="co1", attributes=dict(a=1)), 0 - ) - cube.add_aux_coord( - AuxCoord([0, 1], long_name="co1", attributes=dict(a=2)), 0 - ) + cube.add_aux_coord(AuxCoord([0, 1], long_name="co1", attributes=dict(a=1)), 0) + cube.add_aux_coord(AuxCoord([0, 1], long_name="co1", attributes=dict(a=2)), 0) # Likewise for scalar coords with same name but different attributes. 
- cube.add_aux_coord( - AuxCoord([0], long_name="co2", attributes=dict(a=10, b=12)) - ) - cube.add_aux_coord( - AuxCoord([1], long_name="co2", attributes=dict(a=10, b=11)) - ) + cube.add_aux_coord(AuxCoord([0], long_name="co2", attributes=dict(a=10, b=12))) + cube.add_aux_coord(AuxCoord([1], long_name="co2", attributes=dict(a=10, b=11))) rep = cube_replines(cube) expected = [ @@ -240,9 +224,7 @@ def test_coord_extra_attributes__string(self): cube = Cube(0, long_name="name", units=1) cube.add_aux_coord(AuxCoord([1], long_name="co")) cube.add_aux_coord( - AuxCoord( - [2], long_name="co", attributes=dict(note="string content") - ) + AuxCoord([2], long_name="co", attributes=dict(note="string content")) ) rep = cube_replines(cube) expected = [ @@ -336,9 +318,7 @@ def test_section_vector_auxcoords(self): def test_section_vector_ancils(self): cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_ancillary_variable( - AncillaryVariable([0, 1], long_name="av1"), 0 - ) + cube.add_ancillary_variable(AncillaryVariable([0, 1], long_name="av1"), 0) rep = cube_replines(cube) expected = [ @@ -415,9 +395,7 @@ def test_section_scalar_coords__string(self): "A string value which is very very very very very very " "very very very very very very very very long." 
) - cube.add_aux_coord( - AuxCoord([long_string], long_name="very_long_string") - ) + cube.add_aux_coord(AuxCoord([long_string], long_name="very_long_string")) rep = cube_replines(cube) expected = [ diff --git a/lib/iris/tests/unit/representation/cube_printout/test_Table.py b/lib/iris/tests/unit/representation/cube_printout/test_Table.py index ff9b6cf51b..0e00da7c42 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_Table.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_Table.py @@ -127,9 +127,7 @@ def test_unlimited_column(self): table = Table() aligns = ["left", "right", "left"] table.add_row(["a", "beee", "c"], aligns) - table.add_row( - ["abcd", "any-longer-stuff", "this"], aligns, i_col_unlimited=1 - ) + table.add_row(["abcd", "any-longer-stuff", "this"], aligns, i_col_unlimited=1) table.add_row(["12", "x", "yy"], aligns) result = table.formatted_as_strings() self.assertEqual( diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index 1280c3b38f..74b24899b1 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -11,13 +11,7 @@ import numpy as np from iris._representation.cube_summary import CubeSummary -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - DimCoord, -) +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, CellMethod, DimCoord from iris.cube import Cube from iris.tests.stock.mesh import sample_mesh_cube @@ -62,9 +56,7 @@ def test_blank_cube(self): "Cell measures:", "Ancillary variables:", ] - self.assertEqual( - list(rep.vector_sections.keys()), expected_vector_sections - ) + self.assertEqual(list(rep.vector_sections.keys()), expected_vector_sections) for title in expected_vector_sections: vector_section = rep.vector_sections[title] 
self.assertEqual(vector_section.contents, []) @@ -79,9 +71,7 @@ def test_blank_cube(self): "Attributes:", ] - self.assertEqual( - list(rep.scalar_sections.keys()), expected_scalar_sections - ) + self.assertEqual(list(rep.scalar_sections.keys()), expected_scalar_sections) for title in expected_scalar_sections: scalar_section = rep.scalar_sections[title] self.assertEqual(scalar_section.contents, []) @@ -115,9 +105,7 @@ def test_scalar_coord(self): long_name="foo", attributes={"key": 42, "key2": "value-str"}, ) - scalar_coord_awkward_text = AuxCoord( - ["a is\nb\n and c"], long_name="foo_2" - ) + scalar_coord_awkward_text = AuxCoord(["a is\nb\n and c"], long_name="foo_2") cube.add_aux_coord(scalar_coord_no_bounds) cube.add_aux_coord(scalar_coord_with_bounds) cube.add_aux_coord(scalar_coord_simple_text) @@ -219,16 +207,10 @@ def test_scalar_cube(self): self.assertEqual(rep.header.dimension_header.shape, []) self.assertEqual(rep.header.dimension_header.contents, ["scalar cube"]) self.assertEqual(len(rep.vector_sections), 6) - self.assertTrue( - all(sect.is_empty() for sect in rep.vector_sections.values()) - ) + self.assertTrue(all(sect.is_empty() for sect in rep.vector_sections.values())) self.assertEqual(len(rep.scalar_sections), 6) - self.assertEqual( - len(rep.scalar_sections["Scalar coordinates:"].contents), 1 - ) - self.assertTrue( - rep.scalar_sections["Scalar cell measures:"].is_empty() - ) + self.assertEqual(len(rep.scalar_sections["Scalar coordinates:"].contents), 1) + self.assertTrue(rep.scalar_sections["Scalar cell measures:"].is_empty()) self.assertTrue(rep.scalar_sections["Attributes:"].is_empty()) self.assertTrue(rep.scalar_sections["Cell methods:"].is_empty()) @@ -244,9 +226,7 @@ def test_coord_attributes(self): co2_summ = rep.vector_sections["Auxiliary coordinates:"].contents[0] # Notes: 'b' is same so does not appear; sorted order; quoted strings. 
self.assertEqual(co1_summ.extra, "a=1") - self.assertEqual( - co2_summ.extra, "a=7, text='ok', text2='multi\\nline', z=77" - ) + self.assertEqual(co2_summ.extra, "a=7, text='ok', text2='multi\\nline', z=77") def test_array_attributes(self): cube = self.cube diff --git a/lib/iris/tests/unit/test_Future.py b/lib/iris/tests/unit/test_Future.py index 00f6b82519..74f3c87dec 100644 --- a/lib/iris/tests/unit/test_Future.py +++ b/lib/iris/tests/unit/test_Future.py @@ -50,9 +50,7 @@ def test_deprecated_warning(self): def test_deprecated_error(self): future = patched_future(deprecated=True, error=True) - exp_emsg = ( - "'Future' property 'example_future_flag' has been deprecated" - ) + exp_emsg = "'Future' property 'example_future_flag' has been deprecated" with self.assertRaisesRegex(AttributeError, exp_emsg): future.example_future_flag = False diff --git a/lib/iris/tests/unit/test_sample_data_path.py b/lib/iris/tests/unit/test_sample_data_path.py index aff2c1088f..6378e20575 100644 --- a/lib/iris/tests/unit/test_sample_data_path.py +++ b/lib/iris/tests/unit/test_sample_data_path.py @@ -47,9 +47,7 @@ def test_call(self): def test_file_not_found(self): with mock.patch("iris_sample_data.path", self.sample_dir): - with self.assertRaisesRegex( - ValueError, "Sample data .* not found" - ): + with self.assertRaisesRegex(ValueError, "Sample data .* not found"): sample_data_path("foo") def test_file_absolute(self): @@ -62,15 +60,11 @@ def test_glob_ok(self): sample_glob = "?" 
+ os.path.basename(sample_path)[1:] with mock.patch("iris_sample_data.path", self.sample_dir): result = sample_data_path(sample_glob) - self.assertEqual( - result, os.path.join(self.sample_dir, sample_glob) - ) + self.assertEqual(result, os.path.join(self.sample_dir, sample_glob)) def test_glob_not_found(self): with mock.patch("iris_sample_data.path", self.sample_dir): - with self.assertRaisesRegex( - ValueError, "Sample data .* not found" - ): + with self.assertRaisesRegex(ValueError, "Sample data .* not found"): sample_data_path("foo.*") def test_glob_absolute(self): diff --git a/lib/iris/tests/unit/tests/stock/test_netcdf.py b/lib/iris/tests/unit/tests/stock/test_netcdf.py index eb1c289c37..dbdd321663 100644 --- a/lib/iris/tests/unit/tests/stock/test_netcdf.py +++ b/lib/iris/tests/unit/tests/stock/test_netcdf.py @@ -87,9 +87,7 @@ def create_synthetic_file(self, **create_kwargs): def test_basic_load(self): cube = self.create_synthetic_test_cube() - self.check_cube( - cube, shape=(1, 38, 866), location="face", level="half" - ) + self.check_cube(cube, shape=(1, 38, 866), location="face", level="half") def test_scale_mesh(self): cube = self.create_synthetic_test_cube(n_faces=10) @@ -97,15 +95,11 @@ def test_scale_mesh(self): def test_scale_time(self): cube = self.create_synthetic_test_cube(n_times=3) - self.check_cube( - cube, shape=(3, 38, 866), location="face", level="half" - ) + self.check_cube(cube, shape=(3, 38, 866), location="face", level="half") def test_scale_levels(self): cube = self.create_synthetic_test_cube(n_levels=10) - self.check_cube( - cube, shape=(1, 10, 866), location="face", level="half" - ) + self.check_cube(cube, shape=(1, 10, 866), location="face", level="half") class Test_create_file__xios_3d_face_full_levels(XIOSFileMixin): @@ -116,9 +110,7 @@ def create_synthetic_file(self, **create_kwargs): def test_basic_load(self): cube = self.create_synthetic_test_cube() - self.check_cube( - cube, shape=(1, 39, 866), location="face", level="full" - ) + 
self.check_cube(cube, shape=(1, 39, 866), location="face", level="full") def test_scale_mesh(self): cube = self.create_synthetic_test_cube(n_faces=10) @@ -126,15 +118,11 @@ def test_scale_mesh(self): def test_scale_time(self): cube = self.create_synthetic_test_cube(n_times=3) - self.check_cube( - cube, shape=(3, 39, 866), location="face", level="full" - ) + self.check_cube(cube, shape=(3, 39, 866), location="face", level="full") def test_scale_levels(self): cube = self.create_synthetic_test_cube(n_levels=10) - self.check_cube( - cube, shape=(1, 10, 866), location="face", level="full" - ) + self.check_cube(cube, shape=(1, 10, 866), location="face", level="full") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/time/test_PartialDateTime.py b/lib/iris/tests/unit/time/test_PartialDateTime.py index 6ed00943b9..38a7747965 100644 --- a/lib/iris/tests/unit/time/test_PartialDateTime.py +++ b/lib/iris/tests/unit/time/test_PartialDateTime.py @@ -98,30 +98,22 @@ def test_null(self): self._test_dt(PartialDateTime(), "null") def test_item1_lo(self): - self._test_dt( - PartialDateTime(year=2011, month=3, second=2), "item1_lo" - ) + self._test_dt(PartialDateTime(year=2011, month=3, second=2), "item1_lo") def test_item1_hi(self): self._test_dt(PartialDateTime(year=2015, month=3, day=24), "item1_hi") def test_item2_lo(self): - self._test_dt( - PartialDateTime(year=2013, month=1, second=2), "item2_lo" - ) + self._test_dt(PartialDateTime(year=2013, month=1, second=2), "item2_lo") def test_item2_hi(self): self._test_dt(PartialDateTime(year=2013, month=5, day=24), "item2_hi") def test_item3_lo(self): - self._test_dt( - PartialDateTime(year=2013, month=3, second=1), "item3_lo" - ) + self._test_dt(PartialDateTime(year=2013, month=3, second=1), "item3_lo") def test_item3_hi(self): - self._test_dt( - PartialDateTime(year=2013, month=3, second=42), "item3_hi" - ) + self._test_dt(PartialDateTime(year=2013, month=3, second=42), "item3_hi") def test_mix_hi_lo(self): 
self._test_dt(PartialDateTime(year=2015, month=1, day=24), "mix_hi_lo") @@ -143,9 +135,7 @@ def test_pdt_diff_fewer_fields(self): self._test_pdt(PartialDateTime(year=2013), "pdt_diff_fewer") def test_pdt_diff_more_fields(self): - self._test_pdt( - PartialDateTime(year=2013, day=24, hour=12), "pdt_diff_more" - ) + self._test_pdt(PartialDateTime(year=2013, day=24, hour=12), "pdt_diff_more") def test_pdt_diff_no_fields(self): pdt1 = PartialDateTime() diff --git a/lib/iris/tests/unit/util/test__slice_data_with_keys.py b/lib/iris/tests/unit/util/test__slice_data_with_keys.py index 9c93041521..a46657a3e4 100644 --- a/lib/iris/tests/unit/util/test__slice_data_with_keys.py +++ b/lib/iris/tests/unit/util/test__slice_data_with_keys.py @@ -40,9 +40,7 @@ def __getitem__(self, keys): shape_array = np.zeros(self.shape) shape_array = shape_array.__getitem__(keys) new_shape = shape_array.shape - return DummyArray( - new_shape, _indexing_record_list=self._getitem_call_keys - ) + return DummyArray(new_shape, _indexing_record_list=self._getitem_call_keys) class Indexer: @@ -104,9 +102,7 @@ def test_1d_all(self): def test_1d_tuple(self): # The call makes tuples into 1-D arrays, and a trailing Ellipsis is # added (for the 1-D case only). - self.check( - (3,), Index[((2, 0, 1),)], [(np.array([2, 0, 1]), Ellipsis)] - ) + self.check((3,), Index[((2, 0, 1),)], [(np.array([2, 0, 1]), Ellipsis)]) def test_fail_1d_2keys(self): msg = "More slices .* than dimensions" @@ -226,15 +222,11 @@ def test_2d_losedim0(self): self.check((3, 4), Index[1, :], expect_map={None: None, 0: None, 1: 0}) def test_2d_losedim1(self): - self.check( - (3, 4), Index[1:4, 2], expect_map={None: None, 0: 0, 1: None} - ) + self.check((3, 4), Index[1:4, 2], expect_map={None: None, 0: 0, 1: None}) def test_2d_loseboth(self): # Two indices give scalar result. 
- self.check( - (3, 4), Index[1, 2], expect_map={None: None, 0: None, 1: None} - ) + self.check((3, 4), Index[1, 2], expect_map={None: None, 0: None, 1: None}) def test_3d_losedim1(self): # Cutting out the middle dim. @@ -267,9 +259,7 @@ def test_1d_all(self): self.check([1, 2, 3], Index[:], [1, 2, 3], {None: None, 0: 0}) def test_1d_tuple(self): - self.check( - [1, 2, 3], Index[((2, 0, 1, 0),)], [3, 1, 2, 1], {None: None, 0: 0} - ) + self.check([1, 2, 3], Index[((2, 0, 1, 0),)], [3, 1, 2, 1], {None: None, 0: 0}) def test_fail_1d_2keys(self): msg = "More slices .* than dimensions" diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py index c060967edf..6e32d6389d 100644 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ b/lib/iris/tests/unit/util/test_broadcast_to_shape.py @@ -72,9 +72,7 @@ def test_lazy_masked(self, mocked_compute): mocked_compute.assert_not_called() for i in range(5): for j in range(4): - self.assertMaskedArrayEqual( - b[i, :, j, :].compute().T, m.compute() - ) + self.assertMaskedArrayEqual(b[i, :, j, :].compute().T, m.compute()) def test_masked_degenerate(self): # masked arrays can have degenerate masks too diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index de5308a7fa..8cc4936908 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -123,9 +123,7 @@ def test_array_extra(self): def test_array_different(self): cubes = [self.cube_a1b5v1, self.cube_a1b6v2] - self._test( - cubes, {"a": 1}, [{"b": 5, "v": self.v1}, {"b": 6, "v": self.v2}] - ) + self._test(cubes, {"a": 1}, [{"b": 5, "v": self.v1}, {"b": 6, "v": self.v2}]) def test_array_same(self): cubes = [self.cube_a1b5v1, self.cube_a1b6v1] @@ -148,9 +146,7 @@ def test_complex_somecommon(self): self._test( cubes, { - "STASH": iris.fileformats.pp.STASH( - model=1, section=16, 
item=203 - ), + "STASH": iris.fileformats.pp.STASH(model=1, section=16, item=203), "source": "Data from Met Office Unified Model", }, [{}, {}], @@ -254,13 +250,10 @@ class TestNonCube: def test(self): attrs = [1, 1, 2] coords = [ - AuxCoord([0], attributes={"a": attr, "b": "all_the_same"}) - for attr in attrs + AuxCoord([0], attributes={"a": attr, "b": "all_the_same"}) for attr in attrs ] equalise_attributes(coords) - assert all( - coord.attributes == {"b": "all_the_same"} for coord in coords - ) + assert all(coord.attributes == {"b": "all_the_same"} for coord in coords) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index c27f4f1dcb..2630caa0e5 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -60,9 +60,7 @@ def _test(self, boolean_result, result_name, source_names): else: source_paths = [self._name2path(name) for name in source_names] # Check result is as expected. - self.assertEqual( - boolean_result, file_is_newer_than(result_path, source_paths) - ) + self.assertEqual(boolean_result, file_is_newer_than(result_path, source_paths)) def test_no_sources(self): self._test(True, "example_result", []) @@ -84,9 +82,7 @@ def test_single_fail(self): self._test(False, "example_result", ["newer_source_2"]) def test_multiple_ok(self): - self._test( - True, "example_result", ["older_source_1", "older_source_2"] - ) + self._test(True, "example_result", ["older_source_1", "older_source_2"]) def test_multiple_fail(self): self._test( diff --git a/lib/iris/tests/unit/util/test_find_discontiguities.py b/lib/iris/tests/unit/util/test_find_discontiguities.py index 6965541320..04bf8b40c1 100644 --- a/lib/iris/tests/unit/util/test_find_discontiguities.py +++ b/lib/iris/tests/unit/util/test_find_discontiguities.py @@ -39,9 +39,7 @@ def setUp(self): ) # Repeat for a discontiguity on the topmost bounds. 
self.testcube_discontig_top = full2d_global() - make_bounds_discontiguous_at_point( - self.testcube_discontig_top, 2, 4, in_y=True - ) + make_bounds_discontiguous_at_point(self.testcube_discontig_top, 2, 4, in_y=True) # Repeat for a discontiguity on the botommost bounds. self.testcube_discontig_along_bottom = full2d_global() diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py index 7237f0491c..0caa70cff2 100644 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ b/lib/iris/tests/unit/util/test_mask_cube.py @@ -117,9 +117,7 @@ def test_mask_cube_2d_first_dim(self): np.testing.assert_array_equal(subcube.data.mask, mask_coord.points) def test_mask_cube_2d_second_dim(self): - mask_coord = iris.coords.AuxCoord( - [0, 0, 1, 1], long_name="mask", units=1 - ) + mask_coord = iris.coords.AuxCoord([0, 0, 1, 1], long_name="mask", units=1) returned = mask_cube(self.cube, mask_coord, in_place=False, dim=1) self.assertOriginalMetadata(returned, "simple_2d") for subcube in returned.slices("foo"): @@ -155,9 +153,7 @@ def test_mask_cube_2d_first_dim_in_place(self): self.assertIs(returned, None) def test_mask_cube_2d_create_new_dim(self): - mask = iris.cube.Cube( - [[0, 1, 0], [0, 0, 1]], long_name="mask", units=1 - ) + mask = iris.cube.Cube([[0, 1, 0], [0, 0, 1]], long_name="mask", units=1) broadcast_coord = iris.coords.DimCoord([1, 2], long_name="baz") mask.add_dim_coord(broadcast_coord, 0) diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index 197c06e449..5ba0496854 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -46,9 +46,7 @@ def _assert_cube_notis(self, cube_a, cube_b): for cm_a, cm_b in zip(cube_a.cell_measures(), cube_b.cell_measures()): assert cm_a is not cm_b - for factory_a, factory_b in zip( - cube_a.aux_factories, cube_b.aux_factories - ): + for factory_a, factory_b in zip(cube_a.aux_factories, 
cube_b.aux_factories): assert factory_a is not factory_b def test_promote_no_coord(self, stock_cube): @@ -87,9 +85,7 @@ def test_promote_scalar_dimcoord(self, stock_cube): assert result == expected # Explicitly check time has been made a cube dim coord as cube equality # does not check this. - assert result.coord("time") in [ - item[0] for item in result._dim_coords_and_dims - ] + assert result.coord("time") in [item[0] for item in result._dim_coords_and_dims] self._assert_cube_notis(result, stock_cube) def test_promote_scalar_auxcoord(self, stock_cube): @@ -121,9 +117,7 @@ def test_existing_dim_coord(self, stock_cube): stock_cube.add_aux_coord(coord) new_cube = iris.util.new_axis(stock_cube, coord) - with pytest.raises( - ValueError, match="is already a dimension coordinate." - ): + with pytest.raises(ValueError, match="is already a dimension coordinate."): iris.util.new_axis(new_cube, coord) def test_promote_non_scalar(self, stock_cube): @@ -296,9 +290,7 @@ def test_expand_ancil_var(self, stock_cube): self._assert_cube_notis(result, stock_cube) def test_expand_multiple(self, stock_cube): - result = new_axis( - stock_cube, "time", expand_extras=["wibble", "cell_area"] - ) + result = new_axis(stock_cube, "time", expand_extras=["wibble", "cell_area"]) expected = iris.cube.Cube( stock_cube.data[None], long_name="thingness", units="1" diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py index 8ad9cbf4c2..4631f910a9 100644 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py +++ b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py @@ -20,9 +20,7 @@ def test_dimension_already_has_dimcoord(self): cube_a = stock.hybrid_height() cube_b = cube_a.copy() promote_aux_coord_to_dim_coord(cube_b, "model_level_number") - self.assertEqual( - cube_b.dim_coords, (cube_a.coord("model_level_number"),) - ) + self.assertEqual(cube_b.dim_coords, 
(cube_a.coord("model_level_number"),)) def test_old_dim_coord_is_now_aux_coord(self): cube_a = stock.hybrid_height() diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py index b6da468e7f..562447aaf7 100644 --- a/lib/iris/tests/unit/util/test_reverse.py +++ b/lib/iris/tests/unit/util/test_reverse.py @@ -63,9 +63,7 @@ def test_single_array(self): with self.assertRaisesRegex(ValueError, msg): reverse(a, [0, -1]) - with self.assertRaisesRegex( - TypeError, "To reverse an array, provide an int *" - ): + with self.assertRaisesRegex(TypeError, "To reverse an array, provide an int *"): reverse(a, "latitude") @@ -94,35 +92,21 @@ def setUp(self): aux_coords_and_dims=[(self.span, (0, 1))], ) - self.cube2 = iris.cube.Cube( - data, dim_coords_and_dims=[(a2, 0), (b2, 1)] - ) + self.cube2 = iris.cube.Cube(data, dim_coords_and_dims=[(a2, 0), (b2, 1)]) def check_coorda_reversed(self, result): - self.assertArrayEqual( - self.cube2.coord("a").points, result.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("a").bounds, result.coord("a").bounds - ) + self.assertArrayEqual(self.cube2.coord("a").points, result.coord("a").points) + self.assertArrayEqual(self.cube2.coord("a").bounds, result.coord("a").bounds) def check_coorda_unchanged(self, result): - self.assertArrayEqual( - self.cube1.coord("a").points, result.coord("a").points - ) - self.assertArrayEqual( - self.cube1.coord("a").bounds, result.coord("a").bounds - ) + self.assertArrayEqual(self.cube1.coord("a").points, result.coord("a").points) + self.assertArrayEqual(self.cube1.coord("a").bounds, result.coord("a").bounds) def check_coordb_reversed(self, result): - self.assertArrayEqual( - self.cube2.coord("b").points, result.coord("b").points - ) + self.assertArrayEqual(self.cube2.coord("b").points, result.coord("b").points) def check_coordb_unchanged(self, result): - self.assertArrayEqual( - self.cube1.coord("b").points, result.coord("b").points - ) + 
self.assertArrayEqual(self.cube1.coord("b").points, result.coord("b").points) def test_cube_dim0(self): cube1_reverse0 = reverse(self.cube1, 0) @@ -141,9 +125,7 @@ def test_cube_dim1(self): def test_cube_dim_both(self): cube1_reverse_both = reverse(self.cube1, (0, 1)) - self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_both.data - ) + self.assertArrayEqual(self.cube1.data[::-1, ::-1], cube1_reverse_both.data) self.check_coorda_reversed(cube1_reverse_both) self.check_coordb_reversed(cube1_reverse_both) @@ -164,18 +146,14 @@ def test_cube_coord1(self): def test_cube_coord_both(self): cube1_reverse_both = reverse(self.cube1, (self.a1, self.b1)) - self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_both.data - ) + self.assertArrayEqual(self.cube1.data[::-1, ::-1], cube1_reverse_both.data) self.check_coorda_reversed(cube1_reverse_both) self.check_coordb_reversed(cube1_reverse_both) def test_cube_coord_spanning(self): cube1_reverse_spanning = reverse(self.cube1, "spanning") - self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_spanning.data - ) + self.assertArrayEqual(self.cube1.data[::-1, ::-1], cube1_reverse_spanning.data) self.check_coorda_reversed(cube1_reverse_spanning) self.check_coordb_reversed(cube1_reverse_spanning) @@ -185,12 +163,8 @@ def test_cube_coord_spanning(self): ) def test_wrong_coord_name(self): - msg = ( - "Expected to find exactly 1 'latitude' coordinate, but found none." - ) - with self.assertRaisesRegex( - iris.exceptions.CoordinateNotFoundError, msg - ): + msg = "Expected to find exactly 1 'latitude' coordinate, but found none." + with self.assertRaisesRegex(iris.exceptions.CoordinateNotFoundError, msg): reverse(self.cube1, "latitude") def test_empty_list(self): @@ -207,10 +181,7 @@ def test_wrong_type_cube(self): reverse(self.cube1, self.cube1) def test_wrong_type_float(self): - msg = ( - "coords_or_dims must be int, str, coordinate or sequence of " - "these." 
- ) + msg = "coords_or_dims must be int, str, coordinate or sequence of these." with self.assertRaisesRegex(TypeError, msg): reverse(self.cube1, 3.0) diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py index 533e5d5633..8a017e4e08 100644 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ b/lib/iris/tests/unit/util/test_rolling_window.py @@ -18,9 +18,7 @@ class Test_rolling_window(tests.IrisTest): def test_1d(self): # 1-d array input a = np.array([0, 1, 2, 3, 4], dtype=np.int32) - expected_result = np.array( - [[0, 1], [1, 2], [2, 3], [3, 4]], dtype=np.int32 - ) + expected_result = np.array([[0, 1], [1, 2], [2, 3], [3, 4]], dtype=np.int32) result = rolling_window(a, window=2) self.assertArrayEqual(result, expected_result) diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py index 2d7a3b6d64..c70bb25a0f 100644 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ b/lib/iris/tests/unit/util/test_unify_time_units.py @@ -90,9 +90,7 @@ def test_time_coord_only_in_some_cubes(self): def test_multiple_time_coords_in_cube(self): cube0, cube1 = self.simple_1d_time_cubes() - units = cf_units.Unit( - "days since 1980-05-02 00:00:00", calendar="standard" - ) + units = cf_units.Unit("days since 1980-05-02 00:00:00", calendar="standard") aux_coord = iris.coords.AuxCoord( 72, standard_name="forecast_reference_time", units=units ) @@ -134,9 +132,7 @@ def test_units_bounded_dtype_ints(self): def test_units_dtype_int_float(self): cube0, cube1 = self.simple_1d_time_cubes() cube0.coord("time").points = np.array([1, 2, 3, 4, 5], dtype=int) - cube1.coord("time").points = np.array( - [1, 2, 3, 4, 5], dtype=np.float64 - ) + cube1.coord("time").points = np.array([1, 2, 3, 4, 5], dtype=np.float64) cubelist = iris.cube.CubeList([cube0, cube1]) unify_time_units(cubelist) assert len(cubelist.concatenate()) == 1 diff --git a/lib/iris/util.py b/lib/iris/util.py 
index 0e4e951350..95b65bcbcd 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -279,9 +279,10 @@ def guess_coord_axis(coord): "projection_y_coordinate", ): axis = "Y" - elif coord.units.is_convertible("hPa") or coord.attributes.get( - "positive" - ) in ("up", "down"): + elif coord.units.is_convertible("hPa") or coord.attributes.get("positive") in ( + "up", + "down", + ): axis = "Z" elif coord.units.is_time_reference(): axis = "T" @@ -357,9 +358,7 @@ def rolling_window(a, window=1, step=1, axis=-1): ) rw = ma.array( rw, - mask=np.lib.stride_tricks.as_strided( - mask, shape=shape, strides=strides - ), + mask=np.lib.stride_tricks.as_strided(mask, shape=shape, strides=strides), ) return rw @@ -549,9 +548,7 @@ def reverse(cube_or_array, coords_or_dims): "sequence of these. Got cube." ) - if isinstance(coords_or_dims, str) or not isinstance( - coords_or_dims, Iterable - ): + if isinstance(coords_or_dims, str) or not isinstance(coords_or_dims, Iterable): coords_or_dims = [coords_or_dims] axes = set() @@ -559,9 +556,7 @@ def reverse(cube_or_array, coords_or_dims): if isinstance(coord_or_dim, int): axes.add(coord_or_dim) elif isinstance(cube_or_array, np.ndarray): - raise TypeError( - "To reverse an array, provide an int or sequence of ints." - ) + raise TypeError("To reverse an array, provide an int or sequence of ints.") else: try: axes.update(cube_or_array.coord_dims(coord_or_dim)) @@ -624,8 +619,7 @@ def monotonic(array, strict=False, return_direction=False): """ if array.ndim != 1 or len(array) <= 1: raise ValueError( - "The array to check must be 1 dimensional and have " - "more than 1 element." + "The array to check must be 1 dimensional and have more than 1 element." 
) if ma.isMaskedArray(array) and ma.count_masked(array) != 0: @@ -694,13 +688,9 @@ def column_slices_generator(full_slice, ndims): # Get all of the dimensions for which a tuple of indices were provided # (numpy.ndarrays are treated in the same way tuples in this case) def is_tuple_style_index(key): - return isinstance(key, tuple) or ( - isinstance(key, np.ndarray) and key.ndim == 1 - ) + return isinstance(key, tuple) or (isinstance(key, np.ndarray) and key.ndim == 1) - tuple_indices = [ - i for i, key in enumerate(full_slice) if is_tuple_style_index(key) - ] + tuple_indices = [i for i, key in enumerate(full_slice) if is_tuple_style_index(key)] # stg1: Take a copy of the full_slice specification, turning all tuples # into a full slice @@ -776,10 +766,7 @@ def _build_full_slice_given_keys(keys, ndim): # replace any subsequent Ellipsis objects in keys with # slice(None, None) as per Numpy keys = keys[:i] + tuple( - [ - slice(None, None) if key is Ellipsis else key - for key in keys[i:] - ] + [slice(None, None) if key is Ellipsis else key for key in keys[i:]] ) # iterate over the remaining keys in reverse to fill in @@ -869,16 +856,12 @@ def _wrap_function_for_method(function, docstring=None): else: cutoff = -len(defaults) basic_args = ["self"] + args[1:cutoff] - default_args = [ - "%s=%r" % pair for pair in zip(args[cutoff:], defaults) - ] + default_args = ["%s=%r" % pair for pair in zip(args[cutoff:], defaults)] simple_default_args = args[cutoff:] var_arg = [] if varargs is None else ["*" + varargs] var_kw = [] if varkw is None else ["**" + varkw] arg_source = ", ".join(basic_args + default_args + var_arg + var_kw) - simple_arg_source = ", ".join( - basic_args + simple_default_args + var_arg + var_kw - ) + simple_arg_source = ", ".join(basic_args + simple_default_args + var_arg + var_kw) source = "def %s(%s):\n return function(%s)" % ( function.__name__, arg_source, @@ -993,14 +976,10 @@ def _as_tuple(self): # Prevent attribute updates def __setattr__(self, name, 
value): - raise AttributeError( - "Instances of %s are immutable" % type(self).__name__ - ) + raise AttributeError("Instances of %s are immutable" % type(self).__name__) def __delattr__(self, name): - raise AttributeError( - "Instances of %s are immutable" % type(self).__name__ - ) + raise AttributeError("Instances of %s are immutable" % type(self).__name__) # Provide hash semantics @@ -1011,10 +990,7 @@ def __hash__(self): return hash(self._identity()) def __eq__(self, other): - return ( - isinstance(other, type(self)) - and self._identity() == other._identity() - ) + return isinstance(other, type(self)) and self._identity() == other._identity() def __ne__(self, other): # Since we've defined __eq__ we should also define __ne__. @@ -1181,9 +1157,7 @@ def _reshape_data_array(data_manager): new_data = data_manager.data[None] return new_data - def _handle_dimensional_metadata( - cube, dm_item, cube_add_method, expand_extras - ): + def _handle_dimensional_metadata(cube, dm_item, cube_add_method, expand_extras): cube_dims = dm_item.cube_dims(cube) if dm_item in expand_extras: if cube_dims == (): @@ -1195,9 +1169,7 @@ def _handle_dimensional_metadata( new_dm_item = dm_item.__class__(new_values, **kwargs) try: if dm_item.has_bounds(): - new_dm_item.bounds = _reshape_data_array( - dm_item._bounds_dm - ) + new_dm_item.bounds = _reshape_data_array(dm_item._bounds_dm) except AttributeError: pass else: @@ -1218,9 +1190,7 @@ def _handle_dimensional_metadata( emsg = scalar_coord.name() + " is not a scalar coordinate." 
raise ValueError(emsg) - expand_extras = [ - src_cube._dimensional_metadata(item) for item in expand_extras - ] + expand_extras = [src_cube._dimensional_metadata(item) for item in expand_extras] new_cube = iris.cube.Cube(_reshape_data_array(src_cube._data_manager)) new_cube.metadata = src_cube.metadata @@ -1249,9 +1219,7 @@ def _handle_dimensional_metadata( ) nonderived_coords = src_cube.dim_coords + src_cube.aux_coords - coord_mapping = { - id(old_co): new_cube.coord(old_co) for old_co in nonderived_coords - } + coord_mapping = {id(old_co): new_cube.coord(old_co) for old_co in nonderived_coords} for factory in src_cube.aux_factories: new_factory = factory.updated(coord_mapping) new_cube.add_aux_factory(new_factory) @@ -1288,9 +1256,7 @@ def squeeze(cube): """ - slices = [ - 0 if cube.shape[dim] == 1 else slice(None) for dim in range(cube.ndim) - ] + slices = [0 if cube.shape[dim] == 1 else slice(None) for dim in range(cube.ndim)] squeezed = cube[tuple(slices)] @@ -1485,9 +1451,7 @@ def unify_time_units(cubes): if time_coord.units.is_time_reference(): time_coord.points = time_coord.core_points().astype("float64") if time_coord.bounds is not None: - time_coord.bounds = time_coord.core_bounds().astype( - "float64" - ) + time_coord.bounds = time_coord.core_bounds().astype("float64") epoch = epochs.setdefault( time_coord.units.calendar, time_coord.units.origin ) @@ -1678,9 +1642,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): msg = msg.format(aux_coord.name(), str(valerr)) raise ValueError(msg) - old_dim_coord = cube.coords( - dim_coords=True, contains_dimension=coord_dim[0] - ) + old_dim_coord = cube.coords(dim_coords=True, contains_dimension=coord_dim[0]) if len(old_dim_coord) == 1: demote_dim_coord_to_aux_coord(cube, old_dim_coord[0]) @@ -1897,13 +1859,9 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): rtol=rel_tol, atol=abs_tol ) - bad_points_boolean[:, :-1] = np.logical_or( - bad_points_boolean[:, :-1], diffs_x - ) + 
bad_points_boolean[:, :-1] = np.logical_or(bad_points_boolean[:, :-1], diffs_x) # apply mask for y-direction discontiguities: - bad_points_boolean[:-1, :] = np.logical_or( - bad_points_boolean[:-1, :], diffs_y - ) + bad_points_boolean[:-1, :] = np.logical_or(bad_points_boolean[:-1, :], diffs_y) return bad_points_boolean @@ -1927,9 +1885,7 @@ def _mask_array(array, points_to_mask, in_place=False): if not is_lazy_data(array) and in_place: # Non-lazy array and lazy mask should not come up for in_place # case, due to _binary_op_common handling added at #3790. - raise TypeError( - "Cannot apply lazy mask in-place to a non-lazy array." - ) + raise TypeError("Cannot apply lazy mask in-place to a non-lazy array.") in_place = False elif in_place and not isinstance(array, ma.MaskedArray): @@ -2074,9 +2030,7 @@ def equalise_attributes(cubes): # so the routine works on *other* objects bearing attributes, i.e. not just Cubes. # That is also important since the original code allows that (though the docstring # does not admit it). - cube_attrs = [ - _convert_splitattrs_to_pairedkeys_dict(dic) for dic in cube_attrs - ] + cube_attrs = [_convert_splitattrs_to_pairedkeys_dict(dic) for dic in cube_attrs] # Work out which attributes are identical across all the cubes. 
common_keys = list(cube_attrs[0].keys()) diff --git a/noxfile.py b/noxfile.py index 601a1d576e..9a27b5db98 100755 --- a/noxfile.py +++ b/noxfile.py @@ -36,9 +36,7 @@ def session_lockfile(session: nox.sessions.Session) -> Path: """Return the path of the session lockfile.""" - return Path( - f"requirements/locks/py{session.python.replace('.', '')}-linux-64.lock" - ) + return Path(f"requirements/locks/py{session.python.replace('.', '')}-linux-64.lock") def session_cachefile(session: nox.sessions.Session) -> Path: @@ -288,9 +286,7 @@ def wheel(session: nox.sessions.Session): if len(fname) == 0: raise ValueError("Cannot find wheel to install.") if len(fname) > 1: - emsg = ( - f"Expected to find 1 wheel to install, found {len(fname)} instead." - ) + emsg = f"Expected to find 1 wheel to install, found {len(fname)} instead." raise ValueError(emsg) session.install(fname[0].name) session.run( diff --git a/pyproject.toml b/pyproject.toml index 805e5bb47c..56c8facbff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,7 +67,7 @@ extend-exclude = [ "docs/src/sphinxext", "tools", ] -line-length = 79 +line-length = 88 src = [ "benchmarks", "lib", @@ -110,7 +110,7 @@ local_scheme = "dirty-tag" version_scheme = "release-branch-semver" [tool.black] -line-length = 79 +line-length = 88 target-version = ['py39'] include = '\.pyi?$' extend-exclude = ''' @@ -128,7 +128,7 @@ extend-exclude = ''' [tool.isort] force_sort_within_sections = "True" known_first_party = "iris" -line_length = 79 +line_length = 88 profile = "black" extend_skip = [ "_build", diff --git a/setup.py b/setup.py index 061b35c262..dbba57306b 100644 --- a/setup.py +++ b/setup.py @@ -45,10 +45,7 @@ def finalize_options(self): # Execute the parent "cmd" class method. 
cmd.finalize_options(self) - if ( - not hasattr(self, "editable_mode") - or self.editable_mode is None - ): + if not hasattr(self, "editable_mode") or self.editable_mode is None: # Default to editable i.e., applicable to "std_names" and # and "develop" commands. self.editable_mode = True @@ -78,9 +75,7 @@ def run(self): custom_commands = { "develop": custom_command(develop), "build_py": custom_command(build_py), - "std_names": custom_command( - BaseCommand, help="generate CF standard names" - ), + "std_names": custom_command(BaseCommand, help="generate CF standard names"), } From 2b764a384894d0eb5e00eddfd1dd98faa76ccab7 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 13 Dec 2023 14:34:48 +0000 Subject: [PATCH 112/134] Replace isort with ruff (#5633) * Replace isort with ruff * ruff isort changes * add whatsnew entry * remove tool.isort configuration --- .pre-commit-config.yaml | 7 ------- .ruff.toml | 4 ---- benchmarks/benchmarks/cperf/equality.py | 2 +- benchmarks/benchmarks/cperf/load.py | 2 +- benchmarks/benchmarks/cperf/save.py | 2 +- benchmarks/benchmarks/sperf/equality.py | 2 +- benchmarks/benchmarks/sperf/load.py | 2 +- docs/src/common_links.inc | 1 + docs/src/whatsnew/latest.rst | 5 ++++- lib/iris/common/resolve.py | 2 +- lib/iris/tests/experimental/test_raster.py | 2 +- lib/iris/tests/graphics/__init__.py | 2 +- pyproject.toml | 18 ++++-------------- 13 files changed, 17 insertions(+), 34 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cc6579a536..0b5977998c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -55,13 +55,6 @@ repos: - id: flake8 types: [file, python] -- repo: https://github.com/pycqa/isort - rev: 5.13.0 - hooks: - - id: isort - types: [file, python] - args: [--filter-files] - - repo: https://github.com/asottile/blacken-docs rev: 1.16.0 hooks: diff --git a/.ruff.toml b/.ruff.toml index 0702e77757..1f1f23b0b8 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -17,10 +17,6 @@ lint.ignore = [ # 
https://docs.astral.sh/ruff/rules/#mccabe-c90 "C90", - # isort (I) - # https://docs.astral.sh/ruff/rules/#isort-i - "I", - # pep8-naming (N) # https://docs.astral.sh/ruff/rules/#pep8-naming-n "N", diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index f27558a5ed..ea05cd6bf6 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -5,8 +5,8 @@ """ Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ -from . import SingleDiagnosticMixin from .. import on_demand_benchmark +from . import SingleDiagnosticMixin class EqualityMixin(SingleDiagnosticMixin): diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py index efbd497e2e..e67c095973 100644 --- a/benchmarks/benchmarks/cperf/load.py +++ b/benchmarks/benchmarks/cperf/load.py @@ -5,8 +5,8 @@ """ File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ -from . import SingleDiagnosticMixin from .. import on_demand_benchmark +from . import SingleDiagnosticMixin @on_demand_benchmark diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index fe7ee8b4d2..95814df53a 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -8,9 +8,9 @@ from iris import save -from . import _N_CUBESPHERE_UM_EQUIVALENT, _UM_DIMS_YX from .. import TrackAddedMemoryAllocation, on_demand_benchmark from ..generate_data.ugrid import make_cube_like_2d_cubesphere, make_cube_like_umfield +from . import _N_CUBESPHERE_UM_EQUIVALENT, _UM_DIMS_YX @on_demand_benchmark diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index bb3b707a75..f6a3f547fa 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -5,8 +5,8 @@ """ Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ -from . 
import FileMixin from .. import on_demand_benchmark +from . import FileMixin @on_demand_benchmark diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py index ab1273e288..54c8b3eddb 100644 --- a/benchmarks/benchmarks/sperf/load.py +++ b/benchmarks/benchmarks/sperf/load.py @@ -5,8 +5,8 @@ """ File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ -from . import FileMixin from .. import on_demand_benchmark +from . import FileMixin @on_demand_benchmark diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index ba24141d87..73f7a95e10 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -32,6 +32,7 @@ .. _pull requests: https://github.com/SciTools/iris/pulls .. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ .. _readthedocs.yml: https://github.com/SciTools/iris/blob/main/requirements/readthedocs.yml +.. _ruff: https://github.com/astral-sh/ruff .. _SciTools: https://github.com/SciTools .. _scitools-iris: https://pypi.org/project/scitools-iris/ .. _sphinx: https://www.sphinx-doc.org/en/master/ diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 423831f9b0..436725dca0 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -96,6 +96,7 @@ This document explains the changes made to Iris for this release lazy data from file. This will also speed up coordinate comparison. (:pull:`5610`) + 🔥 Deprecations =============== @@ -134,6 +135,7 @@ This document explains the changes made to Iris for this release #. `@bouweandela`_ updated all hyperlinks to https. (:pull:`5621`) + 💼 Internal =========== @@ -175,6 +177,8 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ configured the ``line-length = 88`` for `black`_, `isort`_ and `ruff`_. (:pull:`5632`) +#. `@bjlittle`_ replaced `isort`_ with `ruff`_. (:pull:`5633`) + .. comment Whatsnew author names (@github name) in alphabetical order. 
Note that, @@ -190,5 +194,4 @@ This document explains the changes made to Iris for this release .. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule .. _codespell: https://github.com/codespell-project/codespell -.. _ruff: https://github.com/astral-sh/ruff .. _split attributes project: https://github.com/orgs/SciTools/projects/5?pane=info diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index c9d1936c41..8c9794f7f8 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -19,8 +19,8 @@ from dask.array.core import broadcast_shapes import numpy as np -from . import LENIENT from ..config import get_logger +from . import LENIENT __all__ = ["Resolve"] diff --git a/lib/iris/tests/experimental/test_raster.py b/lib/iris/tests/experimental/test_raster.py index 0345ed2595..3f268a2854 100644 --- a/lib/iris/tests/experimental/test_raster.py +++ b/lib/iris/tests/experimental/test_raster.py @@ -4,8 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. 
import iris.tests as tests # isort:skip -import PIL.Image import numpy as np +import PIL.Image import iris diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index c2b39f1ac5..0e491c6ed7 100755 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -129,8 +129,8 @@ def repos_equal(repo1: Dict[str, str], repo2: Dict[str, str]) -> bool: def get_phash(input: Path) -> str: - from PIL import Image import imagehash + from PIL import Image return imagehash.phash(Image.open(input), hash_size=HASH_SIZE) diff --git a/pyproject.toml b/pyproject.toml index 56c8facbff..f1fe1befba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,6 +89,10 @@ ignore = [ "ALL", ] +[tool.ruff.lint.isort] +force-sort-within-sections = true +known-first-party = ["iris"] + [tool.ruff.lint.pydocstyle] convention = "numpy" @@ -125,20 +129,6 @@ extend-exclude = ''' ) ''' -[tool.isort] -force_sort_within_sections = "True" -known_first_party = "iris" -line_length = 88 -profile = "black" -extend_skip = [ - "_build", - "generated", - "sphinxext", - "tools", -] -skip_gitignore = "True" -verbose = "False" - [tool.pytest.ini_options] addopts = "-ra" testpaths = "lib/iris" From 1e190b62ed0b646cd12af2c2f4eb3194fe998621 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Wed, 13 Dec 2023 16:52:19 +0000 Subject: [PATCH 113/134] Replace black with ruff (#5634) * Replace black with ruff * ruff format changes * add whatsnew entry * ruff format changes * refactor --- .pre-commit-config.yaml | 9 ++------- README.md | 18 ++++++++--------- .../plot_projections_and_annotations.py | 5 +++-- docs/src/conf.py | 4 +--- docs/src/whatsnew/latest.rst | 2 ++ lib/iris/analysis/_regrid.py | 5 +++-- lib/iris/analysis/calculus.py | 10 ++++++---- lib/iris/analysis/cartography.py | 13 ++++++------ lib/iris/aux_factory.py | 20 ++++++++----------- lib/iris/coord_categorisation.py | 10 ++++++---- lib/iris/fileformats/_ff.py | 10 ++++++---- 
lib/iris/fileformats/netcdf/saver.py | 5 ++--- lib/iris/fileformats/nimrod.py | 5 +++-- lib/iris/fileformats/nimrod_load_rules.py | 3 +-- lib/iris/fileformats/pp.py | 10 +++++----- lib/iris/fileformats/rules.py | 6 ++---- lib/iris/plot.py | 5 +++-- .../analysis/cartography/test_rotate_winds.py | 4 +--- .../unit/common/metadata/test_CubeMetadata.py | 2 +- .../tests/unit/common/resolve/test_Resolve.py | 2 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 4 +--- .../name_loaders/test__build_cell_methods.py | 10 ++++++---- .../nimrod_load_rules/test_units.py | 4 ++-- .../tests/unit/fileformats/pp/test_save.py | 5 +++-- .../test_optimal_array_structure.py | 10 ++++++---- pyproject.toml | 19 +++--------------- 26 files changed, 93 insertions(+), 107 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0b5977998c..b8289b9583 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,6 +34,8 @@ repos: - id: ruff types: [file, python] args: [--fix, --show-fixes] + - id: ruff-format + types: [file, python] - repo: https://github.com/codespell-project/codespell rev: "v2.2.6" @@ -42,13 +44,6 @@ repos: types_or: [asciidoc, python, markdown, rst] additional_dependencies: [tomli] -- repo: https://github.com/psf/black - rev: 23.11.0 - hooks: - - id: black - pass_filenames: false - args: [--config=./pyproject.toml, .] 
- - repo: https://github.com/PyCQA/flake8 rev: 6.1.0 hooks: diff --git a/README.md b/README.md index 493aa87681..7eec86c6da 100644 --- a/README.md +++ b/README.md @@ -9,15 +9,15 @@ analysing and visualising Earth science data -| | | -|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| ⚙️ CI | [![ci-manifest](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml) [![ci-tests](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml) [![ci-wheels](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml) [![pre-commit](https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg)](https://results.pre-commit.ci/latest/github/SciTools/iris/main) | -| 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) 
[![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | -| 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | -| 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | -| ✨ Meta | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/LICENSE) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | -| 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | -| 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) 
[![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) [![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | +| | | +|------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| ⚙️ CI | [![ci-manifest](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-manifest.yml) [![ci-tests](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-tests.yml) [![ci-wheels](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml/badge.svg)](https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml) [![pre-commit](https://results.pre-commit.ci/badge/github/SciTools/iris/main.svg)](https://results.pre-commit.ci/latest/github/SciTools/iris/main) | +| 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) 
[![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | +| 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | +| 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | +| ✨ Meta | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/LICENSE) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | +| 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | +| 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) [![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) 
[![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | | |

diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index a5a5267f1c..76c7206748 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -123,8 +123,9 @@ def make_plot(projection_name, projection_crs): # Add a title, and display. plt.title( - "A pseudocolour plot on the {} projection,\n" - "with overlaid contours.".format(projection_name) + "A pseudocolour plot on the {} projection,\nwith overlaid contours.".format( + projection_name + ) ) iplt.show() diff --git a/docs/src/conf.py b/docs/src/conf.py index e0d8148a54..dab4c9052d 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -331,9 +331,7 @@ def _dotv(version): # if we are building via Read The Docs and it is the latest (not stable) if on_rtd and rtd_version == "latest": - html_theme_options[ - "announcement" - ] = f""" + html_theme_options["announcement"] = f""" You are viewing the latest unreleased documentation {version}. You can switch to a stable diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 436725dca0..13e3b9e4ea 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -179,6 +179,8 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ replaced `isort`_ with `ruff`_. (:pull:`5633`) +#. `@bjlittle`_ replaced `black`_ with `ruff`_. (:pull:`5634`) + .. comment Whatsnew author names (@github name) in alphabetical order. 
Note that, diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 9953fc4cb4..ad5a4557da 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -434,8 +434,9 @@ def _get_horizontal_coord(cube, axis): coords = cube.coords(axis=axis, dim_coords=False) if len(coords) != 1: raise ValueError( - "Cube {!r} must contain a single 1D {} " - "coordinate.".format(cube.name(), axis) + "Cube {!r} must contain a single 1D {} coordinate.".format( + cube.name(), axis + ) ) return coords[0] diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 220999ca55..ee5d402b89 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -167,8 +167,9 @@ def cube_delta(cube, coord): coord.shape[0] == 1 and not getattr(coord, "circular", False) ) or not delta_dims: raise ValueError( - "Cannot calculate delta over {!r} as it has " - "length of 1.".format(coord.name()) + "Cannot calculate delta over {!r} as it has length of 1.".format( + coord.name() + ) ) delta_dim = delta_dims[0] @@ -570,8 +571,9 @@ def curl(i_cube, j_cube, k_cube=None): bad_coords = coord_comparison["resamplable"] if bad_coords: raise ValueError( - "Some coordinates are different ({}), consider " - "resampling.".format(", ".join(group.name() for group in bad_coords)) + "Some coordinates are different ({}), consider resampling.".format( + ", ".join(group.name() for group in bad_coords) + ) ) # Get the dim_coord, or None if none exist, for the xyz dimensions diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 03de657601..bcdc1a6b21 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -414,8 +414,9 @@ def area_weights(cube, normalize=False): lon, lat = _get_lon_lat_coords(cube) except IndexError: raise ValueError( - "Cannot get latitude/longitude " - "coordinates from cube {!r}.".format(cube.name()) + "Cannot get latitude/longitude coordinates from cube 
{!r}.".format( + cube.name() + ) ) if lat.ndim > 1: @@ -631,8 +632,9 @@ def project(cube, target_proj, nx=None, ny=None): lon_coord, lat_coord = _get_lon_lat_coords(cube) except IndexError: raise ValueError( - "Cannot get latitude/longitude " - "coordinates from cube {!r}.".format(cube.name()) + "Cannot get latitude/longitude coordinates from cube {!r}.".format( + cube.name() + ) ) if lat_coord.coord_system != lon_coord.coord_system: @@ -840,8 +842,7 @@ def project(cube, target_proj, nx=None, ny=None): discarded_coords = coords_to_ignore.difference([lat_coord, lon_coord]) if discarded_coords: warnings.warn( - "Discarding coordinates that share dimensions with " - "{} and {}: {}".format( + "Discarding coordinates that share dimensions with {} and {}: {}".format( lat_coord.name(), lon_coord.name(), [coord.name() for coord in discarded_coords], diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 819dd7c17d..e5a704bb56 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -994,9 +994,8 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): sigma.units = cf_units.Unit("1") if sigma is not None and not sigma.units.is_dimensionless(): - msg = ( - "Invalid units: sigma coordinate {!r} " - "must be dimensionless.".format(sigma.name()) + msg = "Invalid units: sigma coordinate {!r} must be dimensionless.".format( + sigma.name() ) raise ValueError(msg) @@ -1198,9 +1197,8 @@ def _check_dependencies(sigma, eta, depth): # Check bounds and shape. 
coord, term = (sigma, "sigma") if coord is not None and coord.nbounds not in (0, 2): - msg = ( - "Invalid {} coordinate {!r}: must have either " - "0 or 2 bounds.".format(term, coord.name()) + msg = "Invalid {} coordinate {!r}: must have either 0 or 2 bounds.".format( + term, coord.name() ) raise ValueError(msg) @@ -1219,9 +1217,8 @@ def _check_dependencies(sigma, eta, depth): sigma.units = cf_units.Unit("1") if sigma is not None and not sigma.units.is_dimensionless(): - msg = ( - "Invalid units: sigma coordinate {!r} " - "must be dimensionless.".format(sigma.name()) + msg = "Invalid units: sigma coordinate {!r} must be dimensionless.".format( + sigma.name() ) raise ValueError(msg) @@ -1550,9 +1547,8 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): # Check bounds and shape. if s is not None and s.nbounds not in (0, 2): - msg = ( - "Invalid s coordinate {!r}: must have either " - "0 or 2 bounds.".format(s.name()) + msg = "Invalid s coordinate {!r}: must have either 0 or 2 bounds.".format( + s.name() ) raise ValueError(msg) diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index c9ad04097d..2515274b8e 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -245,8 +245,9 @@ def _validate_seasons(seasons): ] if not_present: raise ValueError( - "some months do not appear in any season: " - "{!s}".format(", ".join(not_present)) + "some months do not appear in any season: {!s}".format( + ", ".join(not_present) + ) ) # Make a list of months that appear multiple times... 
multi_present = [ @@ -254,8 +255,9 @@ def _validate_seasons(seasons): ] if multi_present: raise ValueError( - "some months appear in more than one season: " - "{!s}".format(", ".join(multi_present)) + "some months appear in more than one season: {!s}".format( + ", ".join(multi_present) + ) ) return diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 741ca626a9..5c32c23757 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -585,8 +585,9 @@ def _adjust_field_for_lbc(self, field): lbtim_default = 11 if field.lbtim not in (0, lbtim_default): raise ValueError( - "LBC field has LBTIM of {:d}, expected only " - "0 or {:d}.".format(field.lbtim, lbtim_default) + "LBC field has LBTIM of {:d}, expected only 0 or {:d}.".format( + field.lbtim, lbtim_default + ) ) field.lbtim = lbtim_default @@ -595,8 +596,9 @@ def _adjust_field_for_lbc(self, field): lbvc_default = 65 if field.lbvc not in (0, lbvc_default): raise ValueError( - "LBC field has LBVC of {:d}, expected only " - "0 or {:d}.".format(field.lbvc, lbvc_default) + "LBC field has LBVC of {:d}, expected only 0 or {:d}.".format( + field.lbvc, lbvc_default + ) ) field.lbvc = lbvc_default # Specifying a vertical encoding scheme means a usable vertical diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index e7e4ec615b..b35b85bbae 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1130,9 +1130,8 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): for factory in cube.aux_factories: factory_defn = _FACTORY_DEFNS.get(type(factory), None) if factory_defn is None: - msg = ( - "Unable to determine formula terms " - "for AuxFactory: {!r}".format(factory) + msg = "Unable to determine formula terms for AuxFactory: {!r}".format( + factory ) warnings.warn(msg, category=iris.exceptions.IrisSaveWarning) else: diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index 
116fecaa35..6a25089ef1 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -236,8 +236,9 @@ def _read_header(self, infile): trailing_length = struct.unpack(">L", infile.read(4))[0] if trailing_length != leading_length: raise TranslationError( - "Expected header trailing_length of {}, " - "got {}.".format(leading_length, trailing_length) + "Expected header trailing_length of {}, got {}.".format( + leading_length, trailing_length + ) ) def _read_data(self, infile): diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 39a150beb0..d72571fff9 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -440,8 +440,7 @@ def horizontal_grid(cube, field, handle_metadata_errors): y_coord_name = "latitude" else: raise TranslationError( - "Horizontal grid type {} not " - "implemented".format(field.horizontal_grid_type) + "Horizontal grid type {} not implemented".format(field.horizontal_grid_type) ) points = np.linspace( field.x_origin, diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index ec87870b1b..e70d4cbf52 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1334,9 +1334,8 @@ def save(self, file_handle): elif lbpack == 1: pp_file.write(packed_data) else: - msg = ( - "Writing packed pp data with lbpack of {} " - "is not supported.".format(lbpack) + msg = "Writing packed pp data with lbpack of {} is not supported.".format( + lbpack ) raise NotImplementedError(msg) @@ -1794,8 +1793,9 @@ def fetch_valid_values_array(): # Check whether this field uses a land or a sea mask. if field.lbpack.n3 not in (1, 2): raise ValueError( - "Unsupported mask compression : " - "lbpack.n3 = {}.".format(field.lbpack.n3) + "Unsupported mask compression : lbpack.n3 = {}.".format( + field.lbpack.n3 + ) ) if field.lbpack.n3 == 2: # Sea-mask packing : points are inverse of the land-mask. 
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 9326901da2..4dbaa8bb51 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -127,13 +127,11 @@ def aux_factory(cube, aux_factory_class): ] if not aux_factories: raise ValueError( - "Cube does not have an aux factory of " - "type {!r}.".format(aux_factory_class) + "Cube does not have an aux factory of type {!r}.".format(aux_factory_class) ) elif len(aux_factories) > 1: raise ValueError( - "Cube has more than one aux factory of " - "type {!r}.".format(aux_factory_class) + "Cube has more than one aux factory of type {!r}.".format(aux_factory_class) ) return aux_factories[0] diff --git a/lib/iris/plot.py b/lib/iris/plot.py index b432ed7620..2dbadab2c9 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -104,8 +104,9 @@ def get_span(coord): total_span = set().union(*spans) if len(total_span) != ndims: raise ValueError( - "The given coordinates ({}) don't span the {} data" - " dimensions.".format(names(coords), ndims) + "The given coordinates ({}) don't span the {} data dimensions.".format( + names(coords), ndims + ) ) # If we have 2-dimensional data, and one or more 1-dimensional diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index ec69fc0e20..12f6141df8 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -152,9 +152,7 @@ def _unrotate_equation( np.radians(trueLongitude) - lambda_angle ) + np.sin(np.radians(rotated_lons)) * np.sin( np.radians(trueLongitude) - lambda_angle - ) * np.cos( - phi_angle - ) + ) * np.cos(phi_angle) sin_rot = -( (np.sin(np.radians(trueLongitude) - lambda_angle) * np.sin(phi_angle)) / np.cos(np.radians(rotated_lats)) diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 
b27dac282b..f06f2ceea0 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -394,7 +394,7 @@ def test_splitattrs_cases( "secondaryXC", "secondaryCC", "secondaryCD", - ] + ], # NOTE: test CX as well as XC, since primary choices has "AX" but not "XA". ) def test_splitattrs_global_local_independence( diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 7445045fdb..7c3dc0d4ae 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -439,7 +439,7 @@ def setUp(self): ] column_parts = [x for x in zip(*parts)] self.metadata, self.coords, self.dims = [list(x) for x in column_parts] - self.dims = [dim for dim, in self.dims] + self.dims = [dim for (dim,) in self.dims] for metadata, coord, dims in parts: item = _Item(metadata=metadata, coord=coord, dims=dims) self.items.append(item) diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index c06d8db85a..da3ac01f66 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -91,9 +91,7 @@ def mock_for_extract_field(self, fields, x=None, y=None): "struct.unpack_from", return_value=[4] ), mock.patch( "iris.fileformats.pp.make_pp_field", side_effect=fields - ), mock.patch( - "iris.fileformats._ff.FF2PP._payload", return_value=(0, 0) - ): + ), mock.patch("iris.fileformats._ff.FF2PP._payload", return_value=(0, 0)): yield ff2pp def _mock_lbc(self, **kwargs): diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index 2ebde5782f..26e6208db1 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ 
b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -101,8 +101,9 @@ def test_unrecognised(self): with mock.patch("warnings.warn") as warn: _ = _build_cell_methods(av_or_int, coord_name) expected_msg = ( - "Unknown {} statistic: {!r}. Unable to " - "create cell method.".format(coord_name, unrecognised_heading) + "Unknown {} statistic: {!r}. Unable to create cell method.".format( + coord_name, unrecognised_heading + ) ) warn.assert_called_with(expected_msg, category=IrisLoadWarning) @@ -126,8 +127,9 @@ def test_unrecognised_similar_to_no_averaging(self): with mock.patch("warnings.warn") as warn: _ = _build_cell_methods(av_or_int, coord_name) expected_msg = ( - "Unknown {} statistic: {!r}. Unable to " - "create cell method.".format(coord_name, unrecognised_heading) + "Unknown {} statistic: {!r}. Unable to create cell method.".format( + coord_name, unrecognised_heading + ) ) warn.assert_called_with(expected_msg, category=IrisLoadWarning) diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index 1f158ce107..88d1f56a7a 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -80,7 +80,7 @@ def test_power_in_units(self): def test_ug_per_m3_units(self): with mock.patch("warnings.warn") as warn: self._call_units( - data=((np.ones_like(self.cube.data) * 10)), + data=(np.ones_like(self.cube.data) * 10), units_str="ug/m3E1", ) self.assertEqual(warn.call_count, 0) @@ -91,7 +91,7 @@ def test_ug_per_m3_units(self): def test_g_per_kg(self): with mock.patch("warnings.warn") as warn: self._call_units( - data=((np.ones_like(self.cube.data) * 1000)), units_str="g/Kg" + data=(np.ones_like(self.cube.data) * 1000), units_str="g/Kg" ) self.assertEqual(warn.call_count, 0) self.assertEqual(self.cube.units, "kg/kg") diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py 
b/lib/iris/tests/unit/fileformats/pp/test_save.py index 2482501ef4..1fcf04636c 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -140,8 +140,9 @@ def check_cube_name_units_yields_lbfc(self, name, units, lbfc_expected): self.assertEqual( lbfc_produced, lbfc_expected, - "Lbfc for ({!r} / {!r}) should be {:d}, " - "got {:d}".format(name, units, lbfc_expected, lbfc_produced), + "Lbfc for ({!r} / {!r}) should be {:d}, got {:d}".format( + name, units, lbfc_expected, lbfc_produced + ), ) def test_name_units_to_lbfc(self): diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index cf545e89d5..63f54fd356 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -30,14 +30,16 @@ def _check_arrays_and_dims(self, result, spec): self.assertEqual( result_dims, spec_dims, - 'element dims differ for "{}": ' - "result={!r}, expected {!r}".format(keyname, result_dims, spec_dims), + 'element dims differ for "{}": ' "result={!r}, expected {!r}".format( + keyname, result_dims, spec_dims + ), ) self.assertArrayEqual( result_array, spec_array, - 'element arrays differ for "{}": ' - "result={!r}, expected {!r}".format(keyname, result_array, spec_array), + 'element arrays differ for "{}": ' "result={!r}, expected {!r}".format( + keyname, result_array, spec_array + ), ) def test_none(self): diff --git a/pyproject.toml b/pyproject.toml index f1fe1befba..73e9bf0693 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,6 +75,9 @@ src = [ ] target-version = "py39" +[tool.ruff.format] +preview = false + [tool.ruff.lint] ignore = [ # NOTE: Non-permanent exclusions should be added to the ".ruff.toml" file. 
@@ -113,22 +116,6 @@ write_to = "lib/iris/_version.py" local_scheme = "dirty-tag" version_scheme = "release-branch-semver" -[tool.black] -line-length = 88 -target-version = ['py39'] -include = '\.pyi?$' -extend-exclude = ''' -( - /( - | sphinxext - | tools - | benchmarks\/\.asv.* - )/ - | _ff_cross_references.py - | um_cf_map.py -) -''' - [tool.pytest.ini_options] addopts = "-ra" testpaths = "lib/iris" From 1430c3d9f7451334d02143cb05ceba1b2aa778c8 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Wed, 13 Dec 2023 17:02:59 +0000 Subject: [PATCH 114/134] ruff compliant and improved docstrings. (#5635) --- .ruff.toml | 1 - lib/iris/__init__.py | 185 +++--- lib/iris/_constraints.py | 126 ++-- lib/iris/analysis/__init__.py | 1156 +++++++++++++++------------------ lib/iris/analysis/calculus.py | 173 ++--- lib/iris/aux_factory.py | 321 ++++----- lib/iris/io/__init__.py | 1 + pyproject.toml | 2 + 8 files changed, 934 insertions(+), 1031 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 1f1f23b0b8..afe5941a14 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -26,7 +26,6 @@ lint.ignore = [ "D", # Permanent "D105", # Missing docstring in magic method - # Temporary, to be removed when we are more compliant "D417", # Missing argument descriptions in the docstring "D101", # Missing docstring in public class diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index e4016ff4d5..828a507fff 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -A package for handling multi-dimensional data and associated metadata. +"""A package for handling multi-dimensional data and associated metadata. .. 
note :: @@ -34,57 +33,57 @@ All the load functions share very similar arguments: - * uris: - Either a single filename/URI expressed as a string or - :class:`pathlib.PurePath`, or an iterable of filenames/URIs. +* uris: + Either a single filename/URI expressed as a string or + :class:`pathlib.PurePath`, or an iterable of filenames/URIs. - Filenames can contain `~` or `~user` abbreviations, and/or - Unix shell-style wildcards (e.g. `*` and `?`). See the - standard library function :func:`os.path.expanduser` and - module :mod:`fnmatch` for more details. + Filenames can contain `~` or `~user` abbreviations, and/or + Unix shell-style wildcards (e.g. `*` and `?`). See the + standard library function :func:`os.path.expanduser` and + module :mod:`fnmatch` for more details. - .. warning:: + .. warning:: - If supplying a URL, only OPeNDAP Data Sources are supported. + If supplying a URL, only OPeNDAP Data Sources are supported. - * constraints: - Either a single constraint, or an iterable of constraints. - Each constraint can be either a string, an instance of - :class:`iris.Constraint`, or an instance of - :class:`iris.AttributeConstraint`. If the constraint is a string - it will be used to match against cube.name(). +* constraints: + Either a single constraint, or an iterable of constraints. + Each constraint can be either a string, an instance of + :class:`iris.Constraint`, or an instance of + :class:`iris.AttributeConstraint`. If the constraint is a string + it will be used to match against cube.name(). - .. _constraint_egs: + .. _constraint_egs: - For example:: + For example:: - # Load air temperature data. - load_cube(uri, 'air_temperature') + # Load air temperature data. + load_cube(uri, 'air_temperature') - # Load data with a specific model level number. - load_cube(uri, iris.Constraint(model_level_number=1)) + # Load data with a specific model level number. + load_cube(uri, iris.Constraint(model_level_number=1)) - # Load data with a specific STASH code. 
- load_cube(uri, iris.AttributeConstraint(STASH='m01s00i004')) + # Load data with a specific STASH code. + load_cube(uri, iris.AttributeConstraint(STASH='m01s00i004')) - * callback: - A function to add metadata from the originating field and/or URI which - obeys the following rules: +* callback: + A function to add metadata from the originating field and/or URI which + obeys the following rules: - 1. Function signature must be: ``(cube, field, filename)``. - 2. Modifies the given cube inplace, unless a new cube is - returned by the function. - 3. If the cube is to be rejected the callback must raise - an :class:`iris.exceptions.IgnoreCubeException`. + 1. Function signature must be: ``(cube, field, filename)``. + 2. Modifies the given cube inplace, unless a new cube is + returned by the function. + 3. If the cube is to be rejected the callback must raise + an :class:`iris.exceptions.IgnoreCubeException`. - For example:: + For example:: - def callback(cube, field, filename): - # Extract ID from filenames given as: __ - experiment_id = filename.split('__')[1] - experiment_coord = iris.coords.AuxCoord( - experiment_id, long_name='experiment_id') - cube.add_aux_coord(experiment_coord) + def callback(cube, field, filename): + # Extract ID from filenames given as: __ + experiment_id = filename.split('__')[1] + experiment_coord = iris.coords.AuxCoord( + experiment_id, long_name='experiment_id') + cube.add_aux_coord(experiment_coord) """ @@ -142,8 +141,7 @@ class Future(threading.local): """Run-time configuration controller.""" def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=False): - """ - A container for run-time options controls. + r"""Container for run-time options controls. To adjust the values simply update the relevant attribute from within your code. 
For example:: @@ -218,9 +216,7 @@ def __setattr__(self, name, value): @contextlib.contextmanager def context(self, **kwargs): - """ - Return a context manager which allows temporary modification of - the option values for the active thread. + """Return context manager for temp modification of option values for the active thread. On entry to the `with` statement, all keyword arguments are applied to the Future object. On exit from the `with` @@ -263,7 +259,7 @@ def context(self, **kwargs): def _generate_cubes(uris, callback, constraints): - """Returns a generator of cubes given the URIs and a callback.""" + """Return a generator of cubes given the URIs and a callback.""" if isinstance(uris, str) or not isinstance(uris, Iterable): # Make a string, or other single item, into an iterable. uris = [uris] @@ -304,26 +300,24 @@ def _load_collection(uris, constraints=None, callback=None): def load(uris, constraints=None, callback=None): - """ - Loads any number of Cubes for each constraint. + """Load any number of Cubes for each constraint. For a full description of the arguments, please see the module documentation for :mod:`iris`. - Args: - - * uris: + Parameters + ---------- + uris : str or :class:`pathlib.PurePath` One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. If supplying a URL, only OPeNDAP Data Sources are supported. - - Kwargs: - - * constraints: + constraints : optional One or more constraints. - * callback: + callback : optional A modifier/filter function. - Returns: + Returns + ------- + :class:`iris.cube.CubeList` An :class:`iris.cube.CubeList`. Note that there is no inherent order to this :class:`iris.cube.CubeList` and it should be treated as if it were random. @@ -333,27 +327,24 @@ def load(uris, constraints=None, callback=None): def load_cube(uris, constraint=None, callback=None): - """ - Loads a single cube. + """Load a single cube. For a full description of the arguments, please see the module documentation for :mod:`iris`. 
- Args: - - * uris: + Parameters + ---------- + uris : One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. If supplying a URL, only OPeNDAP Data Sources are supported. - - Kwargs: - - * constraints: + constraints : optional A constraint. - * callback: + callback : optional A modifier/filter function. - Returns: - An :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube` """ constraints = iris._constraints.list_of_constraints(constraint) @@ -373,26 +364,24 @@ def load_cube(uris, constraint=None, callback=None): def load_cubes(uris, constraints=None, callback=None): - """ - Loads exactly one Cube for each constraint. + """Load exactly one Cube for each constraint. For a full description of the arguments, please see the module documentation for :mod:`iris`. - Args: - - * uris: + Parameters + ---------- + uris : One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. If supplying a URL, only OPeNDAP Data Sources are supported. - - Kwargs: - - * constraints: + constraints : optional One or more constraints. - * callback: + callback : optional A modifier/filter function. - Returns: + Returns + ------- + :class:`iris.cube.CubeList` An :class:`iris.cube.CubeList`. Note that there is no inherent order to this :class:`iris.cube.CubeList` and it should be treated as if it were random. @@ -413,8 +402,7 @@ def load_cubes(uris, constraints=None, callback=None): def load_raw(uris, constraints=None, callback=None): - """ - Loads non-merged cubes. + """Load non-merged cubes. This function is provided for those occasions where the automatic combination of cubes into higher-dimensional cubes is undesirable. @@ -425,21 +413,19 @@ def load_raw(uris, constraints=None, callback=None): For a full description of the arguments, please see the module documentation for :mod:`iris`. - Args: - - * uris: + Parameters + ---------- + uris : One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. 
If supplying a URL, only OPeNDAP Data Sources are supported. - - Kwargs: - - * constraints: + constraints : optional One or more constraints. - * callback: + callback : optional A modifier/filter function. - Returns: - An :class:`iris.cube.CubeList`. + Returns + ------- + :class:`iris.cube.CubeList` """ from iris.fileformats.um._fast_load import _raw_structured_loading @@ -452,8 +438,7 @@ def load_raw(uris, constraints=None, callback=None): def sample_data_path(*path_to_join): - """ - Given the sample data resource, returns the full path to the file. + """Given the sample data resource, returns the full path to the file. .. note:: @@ -487,10 +472,16 @@ def sample_data_path(*path_to_join): def use_plugin(plugin_name): - """ - Convenience function to import a plugin + """Import a plugin. - For example:: + Parameters + ---------- + plugin_name : str + Name of plugin. + + Examples + -------- + The following:: use_plugin("my_plugin") diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index cbac8cbca4..4ce14d5ece 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides objects for building up expressions useful for pattern matching. - -""" +"""Provide objects for building up expressions useful for pattern matching.""" from collections.abc import Iterable, Mapping import operator @@ -16,7 +13,8 @@ class Constraint: - """ + """Cubes can be pattern matched and filtered according to specific criteria. + Constraints are the mechanism by which cubes can be pattern matched and filtered according to specific criteria. @@ -26,22 +24,23 @@ class Constraint: """ def __init__(self, name=None, cube_func=None, coord_values=None, **kwargs): - """ + """Use for filtering cube loading or cube list extraction. 
+ Creates a new instance of a Constraint which can be used for filtering cube loading or cube list extraction. - Args: - - * name: string or None + Parameters + ---------- + name : str or None, optional If a string, it is used as the name to match against the - `~iris.cube.Cube.names` property. - * cube_func: callable or None + :attr:`iris.cube.Cube.names` property. TREMTEST + cube_func : callable or None, optional If a callable, it must accept a Cube as its first and only argument and return either True or False. - * coord_values: dict or None + coord_values : dict or None, optional If a dict, it must map coordinate name to the condition on the associated coordinate. - * `**kwargs`: + **kwargs : The remaining keyword arguments are converted to coordinate constraints. The name of the argument gives the name of a coordinate, and the value of the argument is the condition to meet @@ -62,6 +61,8 @@ def __init__(self, name=None, cube_func=None, coord_values=None, **kwargs): returning True or False if the value of the Cell is desired. e.g. ``model_level_number=lambda cell: 5 < cell < 10`` + Examples + -------- The :ref:`user guide ` covers cube much of constraining in detail, however an example which uses all of the features of this class is given here for completeness:: @@ -161,7 +162,8 @@ def __repr__(self): return "Constraint(%s)" % ", ".join("%s=%r" % (k, v) for k, v in args) def _coordless_match(self, cube): - """ + """Return whether this constraint matches the given cube. + Return whether this constraint matches the given cube when not taking coordinates into account. @@ -176,7 +178,8 @@ def _coordless_match(self, cube): return match def extract(self, cube): - """ + """Return the subset of the given cube which matches this constraint. + Return the subset of the given cube which matches this constraint, else return None. 
@@ -220,9 +223,10 @@ class ConstraintCombination(Constraint): """Represents the binary combination of two Constraint instances.""" def __init__(self, lhs, rhs, operator): - """ - A ConstraintCombination instance is created by providing two - Constraint instances and the appropriate :mod:`operator`. + """Instance created by providing two Constraint instances. + + Instance created by providing two Constraint instances and the + appropriate :mod:`operator`. """ try: @@ -270,15 +274,16 @@ class _CoordConstraint: """Represents the atomic elements which might build up a Constraint.""" def __init__(self, coord_name, coord_thing): - """ + """Create a coordinate constraint. + Create a coordinate constraint given the coordinate name and a thing to compare it with. - Arguments: - - * coord_name - string + Parameters + ---------- + coord_name : str The name of the coordinate to constrain - * coord_thing + coord_thing : The object to compare """ @@ -304,11 +309,7 @@ def __hash__(self): return id(self) def extract(self, cube): - """ - Returns the the column based indices of the given cube which - match the constraint. - - """ + """Return the column based indices of the cube which match the constraint.""" from iris.coords import Cell, DimCoord # Cater for scalar cubes by setting the dimensionality to 1 @@ -371,9 +372,10 @@ def call_func(c): class _ColumnIndexManager: - """ - A class to represent column aligned slices which can be operated on - using ``&``, ``|`` or ``^``. + """Represent column aligned slices which can be operated on. + + Represent column aligned slices which can be operated on using + ``&``, ``|`` or ``^``. :: @@ -386,11 +388,7 @@ class _ColumnIndexManager: """ def __init__(self, ndims): - """ - A _ColumnIndexManager is always created to span the given - number of dimensions. 
- - """ + """_ColumnIndexManager always created to span the given number of dimensions.""" self._column_arrays = [True] * ndims self.ndims = ndims @@ -437,8 +435,9 @@ def __setitem__(self, key, value): ) def as_slice(self): - """ - Turns a _ColumnIndexManager into a tuple which can be used in an + """Turn a _ColumnIndexManager into a tuple. + + Turn a _ColumnIndexManager into a tuple which can be used in an indexing operation. If no index is possible, None will be returned. @@ -485,11 +484,7 @@ def as_slice(self): def list_of_constraints(constraints): - """ - Turns the given constraints into a list of valid constraints - using :func:`as_constraint`. - - """ + """Turn constraints into list of valid constraints using :func:`as_constraint`.""" if isinstance(constraints, str) or not isinstance(constraints, Iterable): constraints = [constraints] @@ -497,7 +492,8 @@ def list_of_constraints(constraints): def as_constraint(thing): - """ + """Casts an object into a cube constraint where possible. + Casts an object into a cube constraint where possible, otherwise a TypeError will be raised. @@ -519,7 +515,8 @@ class AttributeConstraint(Constraint): """Provides a simple Cube-attribute based :class:`Constraint`.""" def __init__(self, **attributes): - """ + """Provide a simple Cube-attribute based :class:`Constraint`. + Example usage:: iris.AttributeConstraint(STASH='m01s16i004') @@ -569,7 +566,7 @@ def __repr__(self): class NameConstraint(Constraint): - """Provides a simple Cube name based :class:`Constraint`.""" + """Provide a simple Cube name based :class:`Constraint`.""" def __init__( self, @@ -578,8 +575,9 @@ def __init__( var_name="none", STASH="none", ): - """ - Provides a simple Cube name based :class:`Constraint`, which matches + """Provide a simple Cube name based :class:`Constraint`. 
+ + Provide a simple Cube name based :class:`Constraint`, which matches against each of the names provided, which may be either standard name, long name, NetCDF variable name and/or the STASH from the attributes dictionary. @@ -587,30 +585,33 @@ def __init__( The name constraint will only succeed if *all* of the provided names match. - Kwargs: - - * standard_name: + Parameters + ---------- + standard_name : optional A string or callable representing the standard name to match against. - * long_name: + long_name : optional A string or callable representing the long name to match against. - * var_name: + var_name : optional A string or callable representing the NetCDF variable name to match against. - * STASH: + STASH : optional A string or callable representing the UM STASH code to match against. - .. note:: - The default value of each of the keyword arguments is the string - "none", rather than the singleton None, as None may be a legitimate - value to be matched against e.g., to constrain against all cubes - where the standard_name is not set, then use standard_name=None. - - Returns: + Notes + ----- + The default value of each of the keyword arguments is the string + "none", rather than the singleton None, as None may be a legitimate + value to be matched against e.g., to constrain against all cubes + where the standard_name is not set, then use standard_name=None. 
- * Boolean + Returns + ------- + bool + Examples + -------- Example usage:: iris.NameConstraint(long_name='air temp', var_name=None) @@ -620,7 +621,6 @@ def __init__( iris.NameConstraint(standard_name='air_temperature', STASH=lambda stash: stash.item == 203) """ - self.standard_name = standard_name self.long_name = long_name self.var_name = var_name diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 2d5706d4f5..83ae07d350 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -A package providing :class:`iris.cube.Cube` analysis support. +"""A package providing :class:`iris.cube.Cube` analysis support. This module defines a suite of :class:`~iris.analysis.Aggregator` instances, which are used to specify the statistical measure to calculate over a @@ -26,11 +25,11 @@ The gallery contains several interesting worked examples of how an :class:`~iris.analysis.Aggregator` may be used, including: - * :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_1d.py` - * :ref:`sphx_glr_generated_gallery_general_plot_SOI_filtering.py` - * :ref:`sphx_glr_generated_gallery_meteorology_plot_hovmoller.py` - * :ref:`sphx_glr_generated_gallery_meteorology_plot_lagged_ensemble.py` - * :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py` +* :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_1d.py` +* :ref:`sphx_glr_generated_gallery_general_plot_SOI_filtering.py` +* :ref:`sphx_glr_generated_gallery_meteorology_plot_hovmoller.py` +* :ref:`sphx_glr_generated_gallery_meteorology_plot_lagged_ensemble.py` +* :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py` """ @@ -93,7 +92,8 @@ class _CoordGroup: - """ + """Represents a list of coordinates, one for each given cube. + Represents a list of coordinates, one for each given cube. 
Which can be operated on conveniently. @@ -110,7 +110,8 @@ def __getitem__(self, key): return list(self).__getitem__(key) def _first_coord_w_cube(self): - """ + """Return the first none None coordinate. + Return the first none None coordinate, and its associated cube as (cube, coord). @@ -137,7 +138,7 @@ def name(self): return first_coord.name() def _oid_tuple(self): - """Return a tuple of object ids for this _CoordGroup's coordinates""" + """Return a tuple of object ids for this _CoordGroup's coordinates.""" return tuple((id(coord) for coord in self)) def __hash__(self): @@ -156,7 +157,8 @@ def __eq__(self, other): return result def matches(self, predicate, default_val=True): - """ + """Apply a function to a coord group returning a list of bools. + Apply a function to a coord group returning a list of bools for each coordinate. @@ -173,7 +175,8 @@ def matches(self, predicate, default_val=True): yield predicate(cube, coord) def matches_all(self, predicate): - """ + """Return whether all coordinates match the given function. + Return whether all coordinates match the given function after running it through :meth:`matches`. @@ -183,7 +186,8 @@ def matches_all(self, predicate): return all(self.matches(predicate)) def matches_any(self, predicate): - """ + """Return whether any coordinates match the given function. + Return whether any coordinates match the given function after running it through :meth:`matches`. @@ -194,7 +198,8 @@ def matches_any(self, predicate): def _dimensional_metadata_comparison(*cubes, object_get=None): - """ + """Help compare coordinates. + Convenience function to help compare coordinates, cell-measures or ancillary-variables, on one or more cubes, by their metadata. @@ -209,25 +214,22 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): `iris.coords.AncillaryVariable`, the cube element type being controlled by the 'object_get' keyword. 
- Args: - - * cubes (iterable of `iris.cube.Cube`): + Parameters + ---------- + cubes : iterable of `iris.cube.Cube` a set of cubes whose coordinates, cell-measures or ancillary-variables are to be compared. - - Kwargs: - - * object_get (callable(cube) or None): + object_get : callable(cube) or None, optional If not None, this must be a cube method returning a list of all cube elements of the required type, i.e. one of `iris.cube.Cube.coords`, `iris.cube.Cube.cell_measures`, or `iris.cube.Cube.ancillary_variables`. If not specified, defaults to `iris.cube.Cube.coords` - Returns: - - result (dict mapping string: list of _CoordGroup): - A dictionary whose keys are match categories and values are groups of - coordinates, cell-measures or ancillary-variables. + Returns + ------- + result (dict mapping string: list of _CoordGroup): + A dictionary whose keys are match categories and values are groups of + coordinates, cell-measures or ancillary-variables. The values of the returned dictionary are lists of _CoordGroup representing grouped coordinates. Each _CoordGroup contains all the input 'cubes', and a @@ -437,64 +439,50 @@ def no_data_dim_fn(cube, coord): class _Aggregator: - """ - The :class:`_Aggregator` base class provides common aggregation - functionality. - - """ + """Base class provides common aggregation functionality.""" def __init__( self, cell_method, call_func, units_func=None, lazy_func=None, **kwargs ): - r""" - Create an aggregator for the given :data:`call_func`. + r"""Create an aggregator for the given :data:`call_func`. - Args: + Aggregators are used by cube aggregation methods such as + :meth:`~iris.cube.Cube.collapsed` and + :meth:`~iris.cube.Cube.aggregated_by`. For example:: - * cell_method (string): - Cell method definition formatter. Used in the fashion - "cell_method.format(\**kwargs)", to produce a cell-method string - which can include keyword values. 
+ result = cube.collapsed('longitude', iris.analysis.MEAN) - * call_func (callable): - | *Call signature*: (data, axis=None, \**kwargs) + A variety of ready-made aggregators are provided in this module, such + as :data:`~iris.analysis.MEAN` and :data:`~iris.analysis.MAX`. Custom + aggregators can also be created for special purposes, see + :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py` + for a worked example. + Parameters + ---------- + cell_method : str + Cell method definition formatter. Used in the fashion + ``cell_method.format(**kwargs)``, to produce a cell-method string + which can include keyword values. + call_func : callable + Call signature: ``(data, axis=None, **kwargs)``. Data aggregation function. Returns an aggregation result, collapsing the 'axis' dimension of the 'data' argument. - - Kwargs: - - * units_func (callable): - | *Call signature*: (units, \**kwargs) - + units_func : callable, optional + Call signature: `(units, **kwargs)`. If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a value that can be made into one. To ensure backwards-compatibility, also accepts a callable with call signature (units). - - * lazy_func (callable or None): + lazy_func : callable or None, optional An alternative to :data:`call_func` implementing a lazy aggregation. Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - - Additional kwargs: + **kwargs : dict, optional Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. - - Aggregators are used by cube aggregation methods such as - :meth:`~iris.cube.Cube.collapsed` and - :meth:`~iris.cube.Cube.aggregated_by`. For example:: - - result = cube.collapsed('longitude', iris.analysis.MEAN) - - A variety of ready-made aggregators are provided in this module, such - as :data:`~iris.analysis.MEAN` and :data:`~iris.analysis.MAX`. 
Custom - aggregators can also be created for special purposes, see - :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py` - for a worked example. - """ #: Cube cell method string. self.cell_method = cell_method @@ -509,7 +497,8 @@ def __init__( self._kwargs = kwargs def lazy_aggregate(self, data, axis, **kwargs): - """ + """Perform aggregation over the data with a lazy operation. + Perform aggregation over the data with a lazy operation, analogous to the 'aggregate' result. @@ -518,25 +507,22 @@ def lazy_aggregate(self, data, axis, **kwargs): This function is usually used in conjunction with update_metadata(), which should be passed the same keyword arguments. - Args: - - * data (array): - A lazy array (:class:`dask.array.Array`). - - * axis (int or list of int): + Parameters + ---------- + data : :class:`dask.array.Array` + A lazy array. + axis: int or list of int The dimensions to aggregate over -- note that this is defined differently to the 'aggregate' method 'axis' argument, which only accepts a single dimension index. - - Kwargs: - - * kwargs: + **kwargs: All keyword arguments are passed through to the data aggregation function. - Returns: - A lazy array representing the aggregation operation - (:class:`dask.array.Array`). + Returns + ------- + :class:`dask.array.Array` + A lazy array representing the aggregation operation. """ if self.lazy_func is None: @@ -550,37 +536,32 @@ def lazy_aggregate(self, data, axis, **kwargs): return self.lazy_func(data, axis=axis, **kwargs) def aggregate(self, data, axis, **kwargs): - """ - Perform the aggregation function given the data. + """Perform the aggregation function given the data. Keyword arguments are passed through to the data aggregation function (for example, the "percent" keyword for a percentile aggregator). This function is usually used in conjunction with update_metadata(), which should be passed the same keyword arguments. 
- Args: - - * data (array): + Parameters + ---------- + data : array Data array. - - * axis (int): + axis : int Axis to aggregate over. - - Kwargs: - - * mdtol (float): + mdtol : float, optional Tolerance of missing data. The value returned will be masked if the fraction of data to missing data is less than or equal to mdtol. mdtol=0 means no missing data is tolerated while mdtol=1 will return the resulting value from the aggregation function. Defaults to 1. - - * kwargs: + **kwargs: All keyword arguments apart from those specified above, are passed through to the data aggregation function. - Returns: - The aggregated data. + Returns + ------- + The aggregated data. """ kwargs = dict(list(self._kwargs.items()) + list(kwargs.items())) @@ -604,21 +585,18 @@ def aggregate(self, data, axis, **kwargs): return result def update_metadata(self, cube, coords, **kwargs): - """ - Update common cube metadata w.r.t the aggregation function. - - Args: + """Update common cube metadata w.r.t the aggregation function. - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` Source cube that requires metadata update. - * coords (:class:`iris.coords.Coord`): + coords : :class:`iris.coords.Coord` The one or more coordinates that were aggregated. - - Kwargs: - - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords (for example, the "ddof" - keyword for a standard deviation aggregator). + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords (for example, the "ddof" + keyword for a standard deviation aggregator). """ # Update the units if required. @@ -630,51 +608,45 @@ def update_metadata(self, cube, coords, **kwargs): cube.units = self.units_func(cube.units, **kwargs) def post_process(self, collapsed_cube, data_result, coords, **kwargs): - """ - Process the result from :func:`iris.analysis.Aggregator.aggregate`. 
+ """Process the result from :func:`iris.analysis.Aggregator.aggregate`. - Args: - - * collapsed_cube: - A :class:`iris.cube.Cube`. - * data_result: + Parameters + ---------- + collapsed_cube: :class:`iris.cube.Cube`. + data_result : Result from :func:`iris.analysis.Aggregator.aggregate` - * coords: + coords : The one or more coordinates that were aggregated over. + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords (for example, the "ddof" + keyword from a standard deviation aggregator). - Kwargs: - - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords (for example, the "ddof" - keyword from a standard deviation aggregator). - - Returns: - The collapsed cube with its aggregated data payload. + Returns + ------- + The collapsed cube with its aggregated data payload. """ collapsed_cube.data = data_result return collapsed_cube def aggregate_shape(self, **kwargs): - """ - The shape of the new dimension/s created by the aggregator. - - Kwargs: + """Shape of the new dimension/s created by the aggregator. - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords. + Parameters + ---------- + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords. - Returns: - A tuple of the aggregate shape. + Returns + ------- + A tuple of the aggregate shape. """ return () def name(self): - """ - Returns the name of the aggregator. - - """ + """Return the name of the aggregator.""" try: name = "_".join(self.cell_method.split()) except AttributeError: @@ -683,42 +655,38 @@ def name(self): class PercentileAggregator(_Aggregator): - """ - The :class:`PercentileAggregator` class provides percentile aggregation - functionality. + """Provide percentile aggregation functionality. 
This aggregator *may* introduce a new dimension to the data for the statistic being calculated, but only if more than one quantile is required. For example, calculating the 50th and 90th percentile will result in a new data dimension with an extent of 2, for each of the quantiles calculated. + This aggregator can used by cube aggregation methods such as + :meth:`~iris.cube.Cube.collapsed` and + :meth:`~iris.cube.Cube.aggregated_by`. For example:: + + cube.collapsed('longitude', iris.analysis.PERCENTILE, percent=50) + """ def __init__(self, units_func=None, **kwargs): - r""" - Create a percentile aggregator. + r"""Create a percentile aggregator. - Kwargs: - - * units_func (callable): - | *Call signature*: (units, \**kwargs) + Parameters + ---------- + units_func : callable, optional + Call signature: ``(units, **kwargs)``. If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a value that can be made into one. To ensure backwards-compatibility, also accepts a callable with call signature (units). - - Additional kwargs: + **kwargs : Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. - This aggregator can used by cube aggregation methods such as - :meth:`~iris.cube.Cube.collapsed` and - :meth:`~iris.cube.Cube.aggregated_by`. For example:: - - cube.collapsed('longitude', iris.analysis.PERCENTILE, percent=50) - """ self._name = "percentile" self._args = ["percent"] @@ -732,9 +700,7 @@ def __init__(self, units_func=None, **kwargs): ) def _base_aggregate(self, data, axis, lazy, **kwargs): - """ - Method to avoid duplication of checks in aggregate and lazy_aggregate. - """ + """Avoid duplication of checks in aggregate and lazy_aggregate.""" msg = "{} aggregator requires the mandatory keyword argument {!r}." 
for arg in self._args: if arg not in kwargs: @@ -751,44 +717,39 @@ def _base_aggregate(self, data, axis, lazy, **kwargs): return _Aggregator.aggregate(self, data, axis, **kwargs) def aggregate(self, data, axis, **kwargs): - """ - Perform the percentile aggregation over the given data. + """Perform the percentile aggregation over the given data. Keyword arguments are passed through to the data aggregation function (for example, the "percent" keyword for a percentile aggregator). This function is usually used in conjunction with update_metadata(), which should be passed the same keyword arguments. - Args: - - * data (array): + Parameters + ---------- + data : array Data array. - - * axis (int): + axis : int Axis to aggregate over. - - Kwargs: - - * mdtol (float): + mdtol : float, optional Tolerance of missing data. The value returned will be masked if the fraction of data to missing data is less than or equal to mdtol. mdtol=0 means no missing data is tolerated while mdtol=1 will return the resulting value from the aggregation function. Defaults to 1. - - * kwargs: + **kwargs : All keyword arguments apart from those specified above, are passed through to the data aggregation function. - Returns: - The aggregated data. + Returns + ------- + The aggregated data. """ - return self._base_aggregate(data, axis, lazy=False, **kwargs) def lazy_aggregate(self, data, axis, **kwargs): - """ + """Perform aggregation over the data with a lazy operation. + Perform aggregation over the data with a lazy operation, analogous to the 'aggregate' result. @@ -797,51 +758,44 @@ def lazy_aggregate(self, data, axis, **kwargs): This function is usually used in conjunction with update_metadata(), which should be passed the same keyword arguments. - Args: - - * data (array): - A lazy array (:class:`dask.array.Array`). - - * axis (int or list of int): + Parameters + ---------- + data : :class:`dask.array.Array` + A lazy array. 
+ axis : int or list of int The dimensions to aggregate over -- note that this is defined differently to the 'aggregate' method 'axis' argument, which only accepts a single dimension index. - - Kwargs: - - * kwargs: + **kwargs : All keyword arguments are passed through to the data aggregation function. - Returns: - A lazy array representing the result of the aggregation operation - (:class:`dask.array.Array`). + Returns + ------- + :class:`dask.array.Array` + A lazy array representing the result of the aggregation operation. """ - return self._base_aggregate(data, axis, lazy=True, **kwargs) def post_process(self, collapsed_cube, data_result, coords, **kwargs): - """ - Process the result from :func:`iris.analysis.Aggregator.aggregate`. - - Args: + """Process the result from :func:`iris.analysis.Aggregator.aggregate`. - * collapsed_cube: - A :class:`iris.cube.Cube`. - * data_result: + Parameters + ---------- + collapsed_cube : :class:`iris.cube.Cube` + data_result : Result from :func:`iris.analysis.Aggregator.aggregate` - * coords: + coords : The one or more coordinates that were aggregated over. + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords (for example, the "percent" + keywords from a percentile aggregator). - Kwargs: - - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords (for example, the "percent" - keywords from a percentile aggregator). - - Returns: - The collapsed cube with it's aggregated data payload. + Returns + ------- + The collapsed cube with it's aggregated data payload. """ cubes = iris.cube.CubeList() @@ -885,19 +839,19 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): return result def aggregate_shape(self, **kwargs): - """ - The shape of the additive dimension created by the aggregator. - - Kwargs: + """Shape of the additive dimension created by the aggregator. 
- * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords. + Parameters + ---------- + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords. - Returns: - A tuple of the additive dimension shape. + Returns + ------- + A tuple of the additive dimension shape. """ - msg = "{} aggregator requires the mandatory keyword argument {!r}." for arg in self._args: if arg not in kwargs: @@ -917,17 +871,12 @@ def aggregate_shape(self, **kwargs): return shape def name(self): - """ - Returns the name of the aggregator. - - """ + """Return the name of the aggregator.""" return self._name class WeightedPercentileAggregator(PercentileAggregator): - """ - The :class:`WeightedPercentileAggregator` class provides percentile - aggregation functionality. + """Provides percentile aggregation functionality. This aggregator *may* introduce a new dimension to the data for the statistic being calculated, but only if more than one quantile is required. @@ -937,17 +886,17 @@ class WeightedPercentileAggregator(PercentileAggregator): """ def __init__(self, units_func=None, lazy_func=None, **kwargs): - r""" - Create a weighted percentile aggregator. + r"""Create a weighted percentile aggregator. - Kwargs: - - * units_func (callable): - | *Call signature*: (units, \**kwargs) + Parameters + ---------- + units_func : callable + | *Call signature*: ``(units, **kwargs)``. If provided, called to convert a cube's units. Returns an :class:`cf_units.Unit`, or a value that can be made into one. + To ensure backwards-compatibility, also accepts a callable with call signature (units). @@ -960,19 +909,21 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): `weights` given to the aggregator (``None`` if no weights are given). See :ref:`user guide ` for an example of weighted aggregation that changes units. 
- - * lazy_func (callable or None): + lazy_func : callable or None An alternative to :data:`call_func` implementing a lazy aggregation. Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - - Additional kwargs: + **kwargs : Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. + Notes + ----- This aggregator can used by cube aggregation methods such as :meth:`~iris.cube.Cube.collapsed` and - :meth:`~iris.cube.Cube.aggregated_by`. For example:: + :meth:`~iris.cube.Cube.aggregated_by`. + + For example:: cube.collapsed('longitude', iris.analysis.WPERCENTILE, percent=50, weights=iris.analysis.cartography.area_weights(cube)) @@ -994,28 +945,26 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): self._weighting_keywords = ["returned", "weights"] def post_process(self, collapsed_cube, data_result, coords, **kwargs): - """ - Process the result from :func:`iris.analysis.Aggregator.aggregate`. + """Process the result from :func:`iris.analysis.Aggregator.aggregate`. Returns a tuple(cube, weights) if a tuple(data, weights) was returned from :func:`iris.analysis.Aggregator.aggregate`. - Args: - - * collapsed_cube: - A :class:`iris.cube.Cube`. - * data_result: + Parameters + ---------- + collapsed_cube : :class:`iris.cube.Cube` + data_result : Result from :func:`iris.analysis.Aggregator.aggregate` - * coords: + coords : The one or more coordinates that were aggregated over. - - Kwargs: - - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords (for example, the "weights" - keyword). - - Returns: + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords (for example, the "weights" + keyword). + + Returns + ------- + collapsed cube The collapsed cube with it's aggregated data payload. 
Or a tuple pair of (cube, weights) if the keyword "returned" is specified and True. @@ -1037,27 +986,21 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): class Aggregator(_Aggregator): - """ - The :class:`Aggregator` class provides common aggregation functionality. - - """ + """The :class:`Aggregator` class provides common aggregation functionality.""" def update_metadata(self, cube, coords, **kwargs): - """ - Update cube cell method metadata w.r.t the aggregation function. - - Args: + """Update cube cell method metadata w.r.t the aggregation function. - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` Source cube that requires metadata update. - * coords (:class:`iris.coords.Coord`): + coords : :class:`iris.coords.Coord` The one or more coordinates that were aggregated. - - Kwargs: - - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords (for example, the "ddof" - keyword for a standard deviation aggregator). + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords (for example, the "ddof" + keyword for a standard deviation aggregator). """ _Aggregator.update_metadata(self, cube, coords, **kwargs) @@ -1083,29 +1026,21 @@ def update_metadata(self, cube, coords, **kwargs): class WeightedAggregator(Aggregator): - """ - Convenience class that supports common weighted aggregation functionality. - - """ + """Convenience class that supports common weighted aggregation functionality.""" def __init__( self, cell_method, call_func, units_func=None, lazy_func=None, **kwargs ): - r""" - Create a weighted aggregator for the given :data:`call_func`. - - Args: + r"""Create a weighted aggregator for the given :data:`call_func`. - * cell_method (string): + Parameters + ---------- + cell_method : str Cell method string that supports string format substitution. 
- - * call_func (callable): + call_func : callable Data aggregation function. Call signature `(data, axis, \**kwargs)`. - - Kwargs: - - * units_func (callable): + units_func : callable, optional | *Call signature*: (units, \**kwargs) If provided, called to convert a cube's units. @@ -1124,12 +1059,11 @@ def __init__( given). See :ref:`user guide ` for an example of weighted aggregation that changes units. - * lazy_func (callable or None): + lazy_func : callable, optional An alternative to :data:`call_func` implementing a lazy aggregation. Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - - Additional kwargs: + ** kwargs : Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. @@ -1147,16 +1081,16 @@ def __init__( self._weighting_keywords = ["returned", "weights"] def uses_weighting(self, **kwargs): - """ - Determine whether this aggregator uses weighting. - - Kwargs: + """Determine whether this aggregator uses weighting. - * kwargs: + Parameters + ---------- + **kwargs : Arguments to filter of weighted keywords. - Returns: - Boolean. + Returns + ------- + bool """ result = False @@ -1167,29 +1101,27 @@ def uses_weighting(self, **kwargs): return result def post_process(self, collapsed_cube, data_result, coords, **kwargs): - """ - Process the result from :func:`iris.analysis.Aggregator.aggregate`. + """Process the result from :func:`iris.analysis.Aggregator.aggregate`. Returns a tuple(cube, weights) if a tuple(data, weights) was returned from :func:`iris.analysis.Aggregator.aggregate`. - Args: - - * collapsed_cube: - A :class:`iris.cube.Cube`. - * data_result: + Parameters + ---------- + collapsed_cube : :class:`iris.cube.Cube` + data_result : Result from :func:`iris.analysis.Aggregator.aggregate` - * coords: + coords : The one or more coordinates that were aggregated over. 
- - Kwargs: - - * This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords (for example, the "weights" - keywords from a mean aggregator). - - Returns: - The collapsed cube with it's aggregated data payload. Or a tuple + **kwargs : + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords (for example, the "weights" + keywords from a mean aggregator). + + Returns + ------- + The collapsed cube + The collapsed cube with it's aggregated data payload. Or a tuple pair of (cube, weights) if the keyword "returned" is specified and True. @@ -1219,9 +1151,9 @@ class _Weights: def __init__(self, weights, cube): """Initialize class instance. - Args: - - * weights (Cube, string, _DimensionalMetadata, array-like): + Parameters + ---------- + weights : cube, str, _DimensionalMetadata, array-like If given as a :class:`iris.cube.Cube`, use its data and units. If given as a :obj:`str` or :class:`iris.coords._DimensionalMetadata`, assume this is (the name of) a @@ -1231,7 +1163,7 @@ def __init__(self, weights, cube): :meth:`iris.cube.Cube.ancillary_variables`). If given as an array-like object, use this directly and assume units of `1`. Note: this does **not** create a copy of the input array. - * cube (Cube): + cube : cube Input cube for aggregation. If weights is given as :obj:`str` or :class:`iris.coords._DimensionalMetadata`, try to extract the :class:`iris.coords._DimensionalMetadata` object and corresponding @@ -1275,23 +1207,23 @@ def __init__(self, weights, cube): def create_weighted_aggregator_fn(aggregator_fn, axis, **kwargs): """Return an aggregator function that can explicitly handle weights. - Args: - - * aggregator_fn (callable): + Parameters + ---------- + aggregator_fn : callable An aggregator function, i.e., a callable that takes arguments ``data``, ``axis`` and ``**kwargs`` and returns an array. 
Examples: :meth:`Aggregator.aggregate`, :meth:`Aggregator.lazy_aggregate`. This function should accept the keyword argument ``weights``. - * axis (int): + axis : int Axis to aggregate over. This argument is directly passed to ``aggregator_fn``. + **kwargs : + Arbitrary keyword arguments passed to ``aggregator_fn``. Should not + include ``weights`` (this will be removed if present). - Kwargs: - - * Arbitrary keyword arguments passed to ``aggregator_fn``. Should not - include ``weights`` (this will be removed if present). - - Returns: + Returns + ------- + function A function that takes two arguments ``data_arr`` and ``weights`` (both should be an array of the same shape) and returns an array. @@ -1310,8 +1242,7 @@ def new_aggregator_fn(data_arr, weights): def _build_dask_mdtol_function(dask_stats_function): - """ - Make a wrapped dask statistic function that supports the 'mdtol' keyword. + """Make a wrapped dask statistic function that supports the 'mdtol' keyword. 'dask_function' must be a dask statistical function, compatible with the call signature : "dask_stats_function(data, axis=axis, **kwargs)". @@ -1353,7 +1284,8 @@ def inner_stat(array, axis=-1, mdtol=None, **kwargs): def _axis_to_single_trailing(stats_function): - """ + """Given a statistical function that acts on the trailing axis. + Given a statistical function that acts on the trailing axis of a 1D or 2D array, wrap it so that higher dimension arrays can be passed, as well as any axis as int or tuple. @@ -1389,10 +1321,7 @@ def inner_stat(data, axis, *args, **kwargs): def _calc_percentile(data, percent, fast_percentile_method=False, **kwargs): - """ - Calculate percentiles along the trailing axis of a 1D or 2D array. 
-
-    """
+    """Calculate percentiles along the trailing axis of a 1D or 2D array."""
     if fast_percentile_method:
         if kwargs.pop("error_on_masked", False):
             msg = (
@@ -1422,26 +1351,23 @@

 @_axis_to_single_trailing
 def _percentile(data, percent, fast_percentile_method=False, **kwargs):
-    """
-    The percentile aggregator is an additive operation. This means that
-    it *may* introduce a new dimension to the data for the statistic being
-    calculated, but only if more than one percentile point is requested.
+    """Percentile aggregator is an additive operation.
+
+    This means that it *may* introduce a new dimension to the data for the
+    statistic being calculated, but only if more than one percentile point is
+    requested.

     If a new additive dimension is formed, then it will always be the last
     dimension of the resulting percentile data payload.

-    Args:
-
-    * data (array-like)
+    Parameters
+    ----------
+    data : array-like
         array from which percentiles are to be calculated
-
-    Kwargs:
-
-    * fast_percentile_method (boolean)
+    fast_percentile_method : bool, optional
         When set to True, uses the numpy.percentiles method as a faster
         alternative to the scipy.mstats.mquantiles method. Does not handle
         masked arrays.
-
     **kwargs : dict, optional
         passed to scipy.stats.mstats.mquantiles if fast_percentile_method is
         False. Otherwise passed to numpy.percentile.
@@ -1472,26 +1398,26 @@ def _percentile(data, percent, fast_percentile_method=False, **kwargs):

 def _weighted_quantile_1D(data, weights, quantiles, **kwargs):
-    """
-    Compute the weighted quantile of a 1D numpy array.
+    """Compute the weighted quantile of a 1D numpy array.

     Adapted from `wquantiles `_

-    Args:
-
-    * data (array)
+    Parameters
+    ----------
+    data : array
         One dimensional data array
-    * weights (array)
+    weights : array
         Array of the same size of `data`. If data is masked, weights must have
         matching mask.
- * quantiles : (float or sequence of floats) + quantiles : float or sequence of floats Quantile(s) to compute. Must have a value between 0 and 1. - - **kwargs + **kwargs : passed to `scipy.interpolate.interp1d` - Returns: - array or float. Calculated quantile values (set to np.nan wherever sum + Returns + ------- + array or float. + Calculated quantile values (set to np.nan wherever sum of weights is zero or masked) """ # Return np.nan if no usable points found @@ -1517,30 +1443,27 @@ def _weighted_quantile_1D(data, weights, quantiles, **kwargs): def _weighted_percentile(data, axis, weights, percent, returned=False, **kwargs): - """ - The weighted_percentile aggregator is an additive operation. This means - that it *may* introduce a new dimension to the data for the statistic being - calculated, but only if more than one percentile point is requested. + """Weighted_percentile aggregator is an additive operation. + + This means that it *may* introduce a new dimension to the data for the + statistic being calculated, but only if more than one percentile point is + requested. If a new additive dimension is formed, then it will always be the last dimension of the resulting percentile data payload. - Args: - - * data: ndarray or masked array - - * axis: int - axis to calculate percentiles over - - * weights: ndarray - array with the weights. Must have same shape as data - - * percent: float or sequence of floats - Percentile rank/s at which to extract value/s. - - * returned: bool, optional - Default False. If True, returns a tuple with the percentiles as the - first element and the sum of the weights as the second element. + Parameters + ---------- + data : ndarray or masked array + axis : int + axis to calculate percentiles over + weights : ndarray + array with the weights. Must have same shape as data + percent : float or sequence of floats + Percentile rank/s at which to extract value/s. + returned : bool, optional + Default False. 
If True, returns a tuple with the percentiles as the + first element and the sum of the weights as the second element. """ # Ensure that data and weights arrays are same shape. @@ -1593,10 +1516,10 @@ def _weighted_percentile(data, axis, weights, percent, returned=False, **kwargs) def _count(array, **kwargs): - """ - Counts the number of points along the axis that satisfy the condition - specified by ``function``. Uses Dask's support for NEP13/18 to work as - either a lazy or a real function. + """Count number of points along the axis that satisfy the condition. + + Condition specified by ``function``. Uses Dask's support for NEP13/18 to + work as either a lazy or a real function. """ func = kwargs.pop("function", None) @@ -1637,9 +1560,7 @@ def _proportion(array, function, axis, **kwargs): def _lazy_max_run(array, axis=-1, **kwargs): - """ - Lazily perform the calculation of maximum run lengths along the given axis - """ + """Lazily perform the calculation of maximum run lengths along the given axis.""" array = iris._lazy_data.as_lazy_data(array) func = kwargs.pop("function", None) if not callable(func): @@ -1691,9 +1612,10 @@ def _lazy_rms(array, axis, **kwargs): def _sum(array, **kwargs): - """ - Weighted or scaled sum. Uses Dask's support for NEP13/18 to work as either - a lazy or a real function. + """Weighted or scaled sum. + + Uses Dask's support for NEP13/18 to work as either a lazy or a real + function. """ axis_in = kwargs.get("axis", None) @@ -1856,24 +1778,28 @@ def interp_order(length): of :class:`~iris.cube.Cube` data occurrences that satisfy a particular criterion, as defined by a user supplied *function*. -**Required** kwargs associated with the use of this aggregator: - -* function (callable): +Parameters +---------- +function : callable A function which converts an array of data values into a corresponding array of True/False values. 
-**For example**: - +Examples +-------- To compute the number of *ensemble members* with precipitation exceeding 10 (in cube data units) could be calculated with:: result = precip_cube.collapsed('ensemble_member', iris.analysis.COUNT, function=lambda values: values > 10) -.. seealso:: The :func:`~iris.analysis.PROPORTION` aggregator. - This aggregator handles masked data and lazy data. +See Also +-------- +PROPORTION : Aggregator instance. +Aggregator : Aggregator Class + + """ @@ -1888,21 +1814,22 @@ def interp_order(length): :class:`~iris.cube.Cube` data occurrences that satisfy a particular criterion, as defined by a user supplied *function*, along the given axis. -**Required** kwargs associated with the use of this aggregator: - -* function (callable): +Parameters +---------- +function : callable A function which converts an array of data values into a corresponding array of True/False values. -**For example**: - +Examples +-------- The longest run of days with precipitation exceeding 10 (in cube data units) at each grid location could be calculated with:: result = precip_cube.collapsed('time', iris.analysis.MAX_RUN, function=lambda values: values > 10) -This aggregator handles masked data, which it treats as interrupting a run, and lazy data. +This aggregator handles masked data, which it treats as interrupting a run, +and lazy data. """ MAX_RUN.name = lambda: "max_run" @@ -1914,8 +1841,8 @@ def interp_order(length): geometric mean over a :class:`~iris.cube.Cube`, as computed by :func:`scipy.stats.mstats.gmean`. -**For example**: - +Examples +-------- To compute zonal geometric means over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.GMEAN) @@ -1931,8 +1858,8 @@ def interp_order(length): harmonic mean over a :class:`~iris.cube.Cube`, as computed by :func:`scipy.stats.mstats.hmean`. 
-**For example**: - +Examples +-------- To compute zonal harmonic mean over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.HMEAN) @@ -1955,26 +1882,26 @@ def interp_order(length): the mean over a :class:`~iris.cube.Cube`, as computed by :func:`numpy.ma.average`. -Additional kwargs associated with the use of this aggregator: - -* mdtol (float): +Parameters +---------- +mdtol : float, optional Tolerance of missing data. The value returned in each element of the returned array will be masked if the fraction of masked data contributing to that element exceeds mdtol. This fraction is calculated based on the number of masked elements. mdtol=0 means no missing data is tolerated while mdtol=1 means the resulting element will be masked if and only if all the contributing elements are masked. Defaults to 1. -* weights (float ndarray): +weights : float ndarray, optional Weights matching the shape of the cube or the length of the window for rolling window operations. Note that, latitude/longitude area weights can be calculated using :func:`iris.analysis.cartography.area_weights`. -* returned (boolean): +returned : bool, optional Set this to True to indicate that the collapsed weights are to be returned along with the collapsed data. Defaults to False. -**For example**: - +Examples +-------- To compute zonal means over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.MEAN) @@ -2002,8 +1929,8 @@ def interp_order(length): the median over a :class:`~iris.cube.Cube`, as computed by :func:`numpy.ma.median`. -**For example**: - +Examples +-------- To compute zonal medians over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.MEDIAN) @@ -2021,8 +1948,8 @@ def interp_order(length): the minimum over a :class:`~iris.cube.Cube`, as computed by :func:`numpy.ma.min`. 
-**For example**: - +Examples +-------- To compute zonal minimums over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.MIN) @@ -2038,8 +1965,8 @@ def interp_order(length): the maximum over a :class:`~iris.cube.Cube`, as computed by :func:`numpy.ma.max`. -**For example**: - +Examples +-------- To compute zonal maximums over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.MAX) @@ -2064,8 +1991,8 @@ def interp_order(length): If multiple coordinates are specified, then the peak calculations are performed individually, in sequence, for each coordinate specified. -**For example**: - +Examples +-------- To compute the peak over the *time* axis of a cube:: result = cube.collapsed('time', iris.analysis.PEAK) @@ -2084,10 +2011,8 @@ def interp_order(length): Parameters ---------- - percent : float or sequence of floats Percentile rank/s at which to extract value/s. - alphap : float, default=1 Plotting positions parameter, see :func:`scipy.stats.mstats.mquantiles`. betap : float, default=1 @@ -2097,13 +2022,11 @@ def interp_order(length): alternative to the :func:`scipy.stats.mstats.mquantiles` method. An exception is raised if the data are masked and the missing data tolerance is not 0. - **kwargs : dict, optional Passed to :func:`scipy.stats.mstats.mquantiles` or :func:`numpy.percentile`. -Example -------- - +Examples +-------- To compute the 10th and 90th percentile over *time*:: result = cube.collapsed('time', iris.analysis.PERCENTILE, percent=[10, 90]) @@ -2132,14 +2055,14 @@ def interp_order(length): that satisfy a particular criterion, as defined by a user supplied *function*. -**Required** kwargs associated with the use of this aggregator: - -* function (callable): +Parameters +---------- +function : callable A function which converts an array of data values into a corresponding array of True/False values. 
-**For example**:
-
+Examples
+--------
 To compute the probability of precipitation exceeding 10
 (in cube data units) across *ensemble members* could be calculated with::

@@ -2177,7 +2100,6 @@ def interp_order(length):

 Example
 -------
-
 To compute the zonal root mean square over the *longitude* axis of a cube::

     result = cube.collapsed('longitude', iris.analysis.RMS)
@@ -2198,14 +2120,14 @@ def interp_order(length):
 the standard deviation over a :class:`~iris.cube.Cube`, as computed by
 :func:`numpy.ma.std`.

-Additional kwargs associated with the use of this aggregator:
-
-* ddof (integer):
+Parameters
+----------
+ddof : int, optional
     Delta degrees of freedom. The divisor used in calculations is N - ddof,
     where N represents the number of elements. Defaults to 1.

-**For example**:
-
+Examples
+--------
 To compute zonal standard deviations over the *longitude* axis of a cube::

     result = cube.collapsed('longitude', iris.analysis.STD_DEV)
@@ -2233,19 +2155,19 @@ def interp_order(length):
 An :class:`~iris.analysis.Aggregator` instance that calculates
 the sum over a :class:`~iris.cube.Cube`, as computed by :func:`numpy.ma.sum`.

-Additional kwargs associated with the use of this aggregator:
-
-* weights (float ndarray):
+Parameters
+----------
+weights : float ndarray, optional
     Weights matching the shape of the cube, or the length of
     the window for rolling window operations. Weights should be
     normalized before using them with this aggregator if scaling
     is not intended.
-* returned (boolean):
+returned : bool, optional
     Set this to True to indicate the collapsed weights are to be returned
     along with the collapsed data. Defaults to False.

-**For example**:
-
+Examples
+--------
 To compute an accumulation over the *time* axis of a cube::

     result = cube.collapsed('time', iris.analysis.SUM)
@@ -2273,14 +2195,14 @@ def interp_order(length):
 the variance over a :class:`~iris.cube.Cube`, as computed by
 :func:`numpy.ma.var`.
-Additional kwargs associated with the use of this aggregator: - -* ddof (integer): +Parameters +---------- +ddof : int, optional Delta degrees of freedom. The divisor used in calculations is N - ddof, where N represents the number of elements. Defaults to 1. -**For example**: - +Examples +-------- To compute zonal variance over the *longitude* axis of a cube:: result = cube.collapsed('longitude', iris.analysis.VARIANCE) @@ -2303,27 +2225,22 @@ def interp_order(length): An :class:`~iris.analysis.WeightedPercentileAggregator` instance that calculates the weighted percentile over a :class:`~iris.cube.Cube`. -**Required** kwargs associated with the use of this aggregator: - -* percent (float or sequence of floats): +Parameters +---------- +percent : float or sequence of floats Percentile rank/s at which to extract value/s. - -* weights (float ndarray): +weights : float ndarray Weights matching the shape of the cube or the length of the window for rolling window operations. Note that, latitude/longitude area weights can be calculated using :func:`iris.analysis.cartography.area_weights`. - -Additional kwargs associated with the use of this aggregator: - -* returned (boolean): +returned : bool, optional Set this to True to indicate that the collapsed weights are to be returned along with the collapsed data. Defaults to False. - -* kind (string or int): +kind : str or int, optional Specifies the kind of interpolation used, see :func:`scipy.interpolate.interp1d` Defaults to "linear", which is - equivalent to alphap=0.5, betap=0.5 in `iris.analysis.PERCENTILE` + equivalent to alphap=0.5, betap=0.5 in :data:`~iris.analysis.PERCENTILE` Notes ------ @@ -2334,9 +2251,7 @@ def interp_order(length): class _Groupby: - """ - Convenience class to determine group slices over one or more group-by - coordinates. + """Determine group slices over one or more group-by coordinates. 
Generate the coordinate slices for the groups and calculate the new group-by coordinates and the new shared coordinates given the @@ -2361,22 +2276,17 @@ def __init__( shared_coords: Optional[list[tuple[iris.coords.Coord, int]]] = None, climatological: bool = False, ) -> None: - """ - Determine the group slices over the group-by coordinates. - - Args: + """Determine the group slices over the group-by coordinates. - * groupby_coords (list :class:`iris.coords.Coord` instances): + Parameters + ---------- + groupby_coords : list of :class:`iris.coords.Coord` One or more coordinates from the same axis over which to group-by. - - Kwargs: - - * shared_coords (list of (:class:`iris.coords.Coord`, `int`) pairs): + shared_coords : list of (:class:`iris.coords.Coord`, `int`) pairs One or more coordinates (including multidimensional coordinates) that share the same group-by coordinate axis. The `int` identifies which dimension of the coord is on the group-by coordinate axis. - - * climatological (bool): + climatological : bool Indicates whether the output is expected to be climatological. For any aggregated time coord(s), this causes the climatological flag to be set and the point for each cell to equal its first bound, thereby @@ -2430,15 +2340,14 @@ def _add_shared_coord(self, coord: iris.coords.Coord, dim: int) -> None: self._shared_coords.append((coord, dim)) def group(self) -> list[tuple[int, ...]]: - """ - Calculate the groups and associated slices over one or more group-by - coordinates. + """Calculate groups and associated slices over one or more group-by coordinates. Also creates new group-by and shared coordinates given the calculated group slices. - Returns: - A list of the coordinate group slices. + Returns + ------- + A list of the coordinate group slices. 
""" if not self._groupby_indices: @@ -2638,15 +2547,13 @@ def __repr__(self) -> str: def clear_phenomenon_identity(cube): - """ - Helper function to clear the standard_name, attributes, and - cell_methods of a cube. - + """Help to clear the standard_name, attributes and cell_methods of a cube. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ cube.rename(None) cube.attributes.clear() @@ -2661,9 +2568,9 @@ def clear_phenomenon_identity(cube): class Linear: - """ - This class describes the linear interpolation and regridding scheme for - interpolating or regridding over one or more orthogonal coordinates, + """Describes the linear interpolation and regridding scheme. + + Use for interpolating or regridding over one or more orthogonal coordinates, typically for use with :meth:`iris.cube.Cube.interpolate()` or :meth:`iris.cube.Cube.regrid()`. @@ -2672,28 +2579,28 @@ class Linear: LINEAR_EXTRAPOLATION_MODES = list(EXTRAPOLATION_MODES.keys()) + ["linear"] def __init__(self, extrapolation_mode="linear"): - """ - Linear interpolation and regridding scheme suitable for interpolating - or regridding over one or more orthogonal coordinates. + """Linear interpolation and regridding scheme. - Kwargs: + Suitable for interpolating or regridding over one or more orthogonal + coordinates. - * extrapolation_mode: + Parameters + ---------- + extrapolation_mode : str Must be one of the following strings: - * 'extrapolate' or 'linear' - The extrapolation points - will be calculated by extending the gradient of the - closest two points. - * 'nan' - The extrapolation points will be be set to NaN. - * 'error' - A ValueError exception will be raised, notifying an - attempt to extrapolate. - * 'mask' - The extrapolation points will always be masked, even - if the source data is not a MaskedArray. - * 'nanmask' - If the source data is a MaskedArray the - extrapolation points will be masked. 
Otherwise they will be - set to NaN. - - The default mode of extrapolation is 'linear'. + * 'extrapolate' or 'linear' - The extrapolation points + will be calculated by extending the gradient of the + closest two points. + * 'nan' - The extrapolation points will be be set to NaN. + * 'error' - A ValueError exception will be raised, notifying an + attempt to extrapolate. + * 'mask' - The extrapolation points will always be masked, even + if the source data is not a MaskedArray. + * 'nanmask' - If the source data is a MaskedArray the + extrapolation points will be masked. Otherwise they will be + set to NaN. + * The default mode of extrapolation is 'linear'. """ if extrapolation_mode not in self.LINEAR_EXTRAPOLATION_MODES: @@ -2711,8 +2618,9 @@ def _normalised_extrapolation_mode(self): return mode def interpolator(self, cube, coords): - """ - Creates a linear interpolator to perform interpolation over the + """Create a linear interpolator to perform interpolation. + + Create a linear interpolator to perform interpolation over the given :class:`~iris.cube.Cube` specified by the dimensions of the given coordinates. @@ -2721,26 +2629,24 @@ def interpolator(self, cube, coords): constructing your own interpolator is preferable. These are detailed in the :ref:`user guide `. - Args: - - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` The source :class:`iris.cube.Cube` to be interpolated. - * coords: + coords : :class:`iris.cube.Cube` The names or coordinate instances that are to be interpolated over. 
- Returns: - A callable with the interface: - - `callable(sample_points, collapse_scalar=True)` - + Returns + ------- + A callable with the interface: ``callable(sample_points, collapse_scalar=True)`` where `sample_points` is a sequence containing an array of values for each of the coordinates passed to this method, and - `collapse_scalar` determines whether to remove length one + ``collapse_scalar`` determines whether to remove length one dimensions in the result cube caused by scalar values in - `sample_points`. + ``sample_points``. - The N arrays of values within `sample_points` will be used to + The N arrays of values within ``sample_points`` will be used to create an N-d grid of points that will then be sampled (rather than just N points) @@ -2749,9 +2655,9 @@ def interpolator(self, cube, coords): cftime.datetime instances. For example, for the callable returned by: - `Linear().interpolator(cube, ['latitude', 'longitude'])`, + ``Linear().interpolator(cube, ['latitude', 'longitude'])``, sample_points must have the form - `[new_lat_values, new_lon_values]`. + ``[new_lat_values, new_lon_values]``. """ return RectilinearInterpolator( @@ -2759,7 +2665,8 @@ def interpolator(self, cube, coords): ) def regridder(self, src_grid, target_grid): - """ + """Create a linear regridder to perform regridding. + Creates a linear regridder to perform regridding from the source grid to the target grid. @@ -2772,20 +2679,18 @@ def regridder(self, src_grid, target_grid): `chunks `__ in horizontal dimensions will be combined before regridding. - Args: - - * src_grid: + Parameters + ---------- + src_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the source grid. - * target_grid: + target_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the target grid. - Returns: - A callable with the interface: - - `callable(cube)` - - where `cube` is a cube with the same grid as `src_grid` - that is to be regridded to the `target_grid`. 
+ Returns + ------- + A callable with the interface ``callable(cube)`` + where `cube` is a cube with the same grid as ``src_grid`` + that is to be regridded to the ``target_grid``. """ return RectilinearRegridder( @@ -2797,22 +2702,25 @@ def regridder(self, src_grid, target_grid): class AreaWeighted: - """ + """Describes an area-weighted regridding scheme for regridding. + This class describes an area-weighted regridding scheme for regridding between 'ordinary' horizontal grids with separated X and Y coordinates in a common coordinate system. + Typically for use with :meth:`iris.cube.Cube.regrid()`. """ def __init__(self, mdtol=1): - """ - Area-weighted regridding scheme suitable for regridding between - different orthogonal XY grids in the same coordinate system. + """Area-weighted regridding scheme. - Kwargs: + Suitable for regridding between different orthogonal XY grids in the + same coordinate system. - * mdtol (float): + Parameters + ---------- + mdtol : float Tolerance of missing data. The value returned in each element of the returned array will be masked if the fraction of missing data exceeds mdtol. This fraction is calculated based on the area of @@ -2821,11 +2729,11 @@ def __init__(self, mdtol=1): will be masked if and only if all the overlapping elements of the source grid are masked. Defaults to 1. - .. Note: - Both sourge and target cubes must have an XY grid defined by - separate X and Y dimensions with dimension coordinates. - All of the XY dimension coordinates must also be bounded, and have - the same coordinate system. + .. note:: + Both sourge and target cubes must have an XY grid defined by + separate X and Y dimensions with dimension coordinates. + All of the XY dimension coordinates must also be bounded, and have + the same coordinate system. 
""" if not (0 <= mdtol <= 1): @@ -2837,7 +2745,8 @@ def __repr__(self): return "AreaWeighted(mdtol={})".format(self.mdtol) def regridder(self, src_grid_cube, target_grid_cube): - """ + """Create an area-weighted regridder to perform regridding. + Creates an area-weighted regridder to perform regridding from the source grid to the target grid. @@ -2850,18 +2759,16 @@ def regridder(self, src_grid_cube, target_grid_cube): `chunks `__ in horizontal dimensions will be combined before regridding. - Args: - - * src_grid_cube: + Parameters + ---------- + src_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the source grid. - * target_grid_cube: + target_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the target grid. - Returns: - A callable with the interface: - - `callable(cube)` - + Returns + ------- + A callable with the interface `callable(cube)` where `cube` is a cube with the same grid as `src_grid_cube` that is to be regridded to the grid of `target_grid_cube`. @@ -2870,36 +2777,36 @@ def regridder(self, src_grid_cube, target_grid_cube): class Nearest: - """ - This class describes the nearest-neighbour interpolation and regridding - scheme for interpolating or regridding over one or more orthogonal + """Describe the nearest-neighbour interpolation and regridding scheme. + + For interpolating or regridding over one or more orthogonal coordinates, typically for use with :meth:`iris.cube.Cube.interpolate()` or :meth:`iris.cube.Cube.regrid()`. """ def __init__(self, extrapolation_mode="extrapolate"): - """ - Nearest-neighbour interpolation and regridding scheme suitable for - interpolating or regridding over one or more orthogonal coordinates. + """Nearest-neighbour interpolation and regridding scheme. - Kwargs: + Suitable for interpolating or regridding over one or more orthogonal + coordinates. 
- * extrapolation_mode: + Parameters + ---------- + extrapolation_mode : optional Must be one of the following strings: - * 'extrapolate' - The extrapolation points will take their - value from the nearest source point. - * 'nan' - The extrapolation points will be be set to NaN. - * 'error' - A ValueError exception will be raised, notifying an - attempt to extrapolate. - * 'mask' - The extrapolation points will always be masked, even - if the source data is not a MaskedArray. - * 'nanmask' - If the source data is a MaskedArray the - extrapolation points will be masked. Otherwise they will be - set to NaN. - - The default mode of extrapolation is 'extrapolate'. + * 'extrapolate' - The extrapolation points will take their + value from the nearest source point. + * 'nan' - The extrapolation points will be be set to NaN. + * 'error' - A ValueError exception will be raised, notifying an + attempt to extrapolate. + * 'mask' - The extrapolation points will always be masked, even + if the source data is not a MaskedArray. + * 'nanmask' - If the source data is a MaskedArray the + extrapolation points will be masked. Otherwise they will be + set to NaN. + * The default mode of extrapolation is 'extrapolate'. """ if extrapolation_mode not in EXTRAPOLATION_MODES: @@ -2911,7 +2818,11 @@ def __repr__(self): return "Nearest({!r})".format(self.extrapolation_mode) def interpolator(self, cube, coords): - """ + """Perform interpolation over the given :class:`~iris.cube.Cube`. + + Perform interpolation over the given :class:`~iris.cube.Cube` specified + by the dimensions of the specified coordinates. + Creates a nearest-neighbour interpolator to perform interpolation over the given :class:`~iris.cube.Cube` specified by the dimensions of the specified coordinates. @@ -2921,39 +2832,38 @@ def interpolator(self, cube, coords): constructing your own interpolator is preferable. These are detailed in the :ref:`user guide `. 
- Args: - - * cube: + Parameters + ---------- + cube : The source :class:`iris.cube.Cube` to be interpolated. - * coords: + coords : The names or coordinate instances that are to be interpolated over. - Returns: - A callable with the interface: - - `callable(sample_points, collapse_scalar=True)` - - where `sample_points` is a sequence containing an array of values + Returns + ------- + A callable with the interface `callable(sample_points, collapse_scalar=True)`` + Where ``sample_points`` is a sequence containing an array of values for each of the coordinates passed to this method, and `collapse_scalar` determines whether to remove length one dimensions in the result cube caused by scalar values in - `sample_points`. + ``sample_points``. The values for coordinates that correspond to date/times may optionally be supplied as datetime.datetime or cftime.datetime instances. For example, for the callable returned by: - `Nearest().interpolator(cube, ['latitude', 'longitude'])`, + ``Nearest().interpolator(cube, ['latitude', 'longitude'])``, sample_points must have the form - `[new_lat_values, new_lon_values]`. + ``[new_lat_values, new_lon_values]``. """ return RectilinearInterpolator(cube, coords, "nearest", self.extrapolation_mode) def regridder(self, src_grid, target_grid): - """ + """Create a nearest-neighbour regridder. + Creates a nearest-neighbour regridder to perform regridding from the source grid to the target grid. @@ -2966,19 +2876,17 @@ def regridder(self, src_grid, target_grid): `chunks `__ in horizontal dimensions will be combined before regridding. - Args: - - * src_grid: + Parameters + ---------- + src_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the source grid. - * target_grid: + target_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the target grid. 
- Returns: - A callable with the interface: - - `callable(cube)` - - where `cube` is a cube with the same grid as `src_grid` + Returns + ------- + A callable with the interface `callable(cube)` + Where `cube` is a cube with the same grid as `src_grid` that is to be regridded to the `target_grid`. """ @@ -2988,7 +2896,8 @@ def regridder(self, src_grid, target_grid): class UnstructuredNearest: - """ + """Nearest-neighbour regridding scheme. + This is a nearest-neighbour regridding scheme for regridding data whose horizontal (X- and Y-axis) coordinates are mapped to the *same* dimensions, rather than being orthogonal on independent dimensions. @@ -3016,10 +2925,11 @@ class UnstructuredNearest: # regridder class, # :class:`iris.analysis.trajectory.UnstructuredNearestNeigbourRegridder`. def __init__(self): - """ - Nearest-neighbour interpolation and regridding scheme suitable for - interpolating or regridding from un-gridded data such as trajectories - or other data where the X and Y coordinates share the same dimensions. + """Nearest-neighbour interpolation and regridding scheme. + + Suitable for interpolating or regridding from un-gridded data such as + trajectories or other data where the X and Y coordinates share the same + dimensions. """ pass @@ -3031,9 +2941,9 @@ def __repr__(self): # def interpolator(self, cube): def regridder(self, src_cube, target_grid): - """ - Creates a nearest-neighbour regridder, of the - :class:`~iris.analysis.trajectory.UnstructuredNearestNeigbourRegridder` + """Create a nearest-neighbour regridder. + + Using the :class:`~iris.analysis.trajectory.UnstructuredNearestNeigbourRegridder` type, to perform regridding from the source grid to the target grid. This can then be applied to any source data with the same structure as @@ -3046,24 +2956,21 @@ def regridder(self, src_cube, target_grid): Does not support lazy regridding. 
- Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the source grid. The X and Y coordinates can have any shape, but must be mapped over the same cube dimensions. - - * target_grid: + target_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the target grid. The X and Y coordinates must be one-dimensional dimension coordinates, mapped to different dimensions. All other cube components are ignored. - Returns: - A callable with the interface: - - `callable(cube)` - + Returns + ------- + A callable with the interface `callable(cube)` where `cube` is a cube with the same grid as `src_cube` that is to be regridded to the `target_grid`. @@ -3074,9 +2981,9 @@ def regridder(self, src_cube, target_grid): class PointInCell: - """ - This class describes the point-in-cell regridding scheme for use - typically with :meth:`iris.cube.Cube.regrid()`. + """Describes the point-in-cell regridding scheme. + + For use typically with :meth:`iris.cube.Cube.regrid()`. Each result datapoint is an average over all source points that fall inside that (bounded) target cell. @@ -3097,7 +3004,8 @@ class PointInCell: """ def __init__(self, weights=None): - """ + """Point-in-cell regridding scheme. + Point-in-cell regridding scheme suitable for regridding from a source cube with X and Y coordinates all on the same dimensions, to a target cube with bounded X and Y coordinates on separate X and Y dimensions. @@ -3105,20 +3013,20 @@ def __init__(self, weights=None): Each result datapoint is an average over all source points that fall inside that (bounded) target cell. - Optional Args: - - * weights: - A :class:`numpy.ndarray` instance that defines the weights - for the grid cells of the source grid. Must have the same shape - as the data of the source grid. + Parameters + ---------- + weights : :class:`numpy.ndarray`, optional + Defines the weights for the grid cells of the source grid. 
Must + have the same shape as the data of the source grid. If unspecified, equal weighting is assumed. """ self.weights = weights def regridder(self, src_grid, target_grid): - """ - Creates a point-in-cell regridder to perform regridding from the + """Create a point-in-cell regridder. + + Create a point-in-cell regridder to perform regridding from the source grid to the target grid. Typically you should use :meth:`iris.cube.Cube.regrid` for @@ -3128,19 +3036,17 @@ def regridder(self, src_grid, target_grid): Does not support lazy regridding. - Args: - - * src_grid: + Parameters + ---------- + src_grid : The :class:`~iris.cube.Cube` defining the source grid. - * target_grid: + target_grid : The :class:`~iris.cube.Cube` defining the target grid. - Returns: - A callable with the interface: - - `callable(cube)` - - where `cube` is a cube with the same grid as `src_grid` + Returns + ------- + A callable with the interface `callable(cube)` + Where `cube` is a cube with the same grid as `src_grid` that is to be regridded to the `target_grid`. """ diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index ee5d402b89..4dd3171fae 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Calculus operations on :class:`iris.cube.Cube` instances. +"""Calculus operations on :class:`iris.cube.Cube` instances. See also: :mod:`NumPy `. @@ -30,7 +29,8 @@ def _construct_delta_coord(coord): - """ + """Return a coordinate of deltas between the given coordinate's points. + Return a coordinate of deltas between the given coordinate's points. 
If the original coordinate has length n and is circular then the result will be a coordinate of length n, otherwise the result will be @@ -63,7 +63,8 @@ def _construct_delta_coord(coord): def _construct_midpoint_coord(coord, circular=None): - """ + """Return a coordinate of mid-points from the given coordinate. + Return a coordinate of mid-points from the given coordinate. If the given coordinate has length n and the circular flag set then the result will be a coordinate of length n, otherwise the result will be @@ -128,31 +129,27 @@ def _construct_midpoint_coord(coord, circular=None): def cube_delta(cube, coord): - """ - Given a cube calculate the difference between each value in the - given coord's direction. - - - Args: + """Given a cube calculate the difference between each value in the coord's direction. - * coord + Parameters + ---------- + coord : either a Coord instance or the unique name of a coordinate in the cube. If a Coord instance is provided, it does not necessarily have to exist in the cube. - Example usage:: + Examples + -------- + :: - change_in_temperature_wrt_pressure = \ -cube_delta(temperature_cube, 'pressure') + change_in_temperature_wrt_pressure = cube_delta(temperature_cube, 'pressure') .. note:: Missing data support not yet implemented. .. note:: - This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. - """ # handle the case where a user passes a coordinate name if isinstance(coord, str): @@ -205,19 +202,22 @@ def cube_delta(cube, coord): def differentiate(cube, coord_to_differentiate): - r""" + r"""Calculate the differential of a given cube. + Calculate the differential of a given cube with respect to the coord_to_differentiate. - Args: - - * coord_to_differentiate: + Parameters + ---------- + coord_to_differentiate : Either a Coord instance or the unique name of a coordinate which exists in the cube. 
If a Coord instance is provided, it does not necessarily have to exist on the cube. - Example usage:: + Examples + -------- + :: u_wind_acceleration = differentiate(u_wind_cube, 'forecast_time') @@ -251,13 +251,13 @@ def differentiate(cube, coord_to_differentiate): where `c` and `b` represent the input coordinate values and bounds, and `C` and `B` the output coordinate values and bounds. - .. note:: Difference method used is the same as :func:`cube_delta` + .. note:: + Difference method used is the same as :func:`cube_delta` and therefore has the same limitations. .. note:: Spherical differentiation does not occur in this routine. .. note:: - This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -283,7 +283,8 @@ def differentiate(cube, coord_to_differentiate): def _curl_subtract(a, b): - """ + """Straight forward wrapper to :func:`iris.analysis.maths.subtract`. + Simple wrapper to :func:`iris.analysis.maths.subtract` to subtract two cubes, which deals with None in a way that makes sense in the context of curl. @@ -308,7 +309,8 @@ def _curl_subtract(a, b): def _curl_differentiate(cube, coord): - """ + """Straight forward wrapper to :func:`differentiate`. + Simple wrapper to :func:`differentiate` to differentiate a cube and deal with None in a way that makes sense in the context of curl. @@ -331,7 +333,8 @@ def _curl_differentiate(cube, coord): def _curl_regrid(cube, prototype): - """ + """Straight forward wrapper to :ref`iris.cube.Cube.regridded`. + Simple wrapper to :ref`iris.cube.Cube.regridded` to deal with None in a way that makes sense in the context of curl. @@ -351,7 +354,8 @@ def _curl_regrid(cube, prototype): def _copy_cube_transformed(src_cube, data, coord_func): - """ + """Return a new cube with the given data with the coordinates transformed. + Returns a new cube based on the src_cube, but with the given data, and with the coordinates transformed via coord_func. 
@@ -412,12 +416,11 @@ def coord_func(coord): def _coord_sin(coord): - """ - Return a coordinate which represents sin(coord). - - Args: + """Return a coordinate which represents sin(coord). - * coord + Parameters + ---------- + coord : Coord instance with values in either degrees or radians """ @@ -425,12 +428,11 @@ def _coord_sin(coord): def _coord_cos(coord): - """ - Return a coordinate which represents cos(coord). - - Args: + """Return a coordinate which represents cos(coord). - * coord + Parameters + ---------- + coord : Coord instance with values in either degrees or radians """ @@ -438,14 +440,13 @@ def _coord_cos(coord): def _trig_method(coord, trig_function): - """ - Return a coordinate which represents trig_function(coord). + """Return a coordinate which represents trig_function(coord). - Args: - - * coord + Parameters + ---------- + coord : Coord instance with points values in either degrees or radians - * trig_function + trig_function : Reference to a trigonometric function e.g. numpy.sin """ @@ -465,7 +466,8 @@ def _trig_method(coord, trig_function): def curl(i_cube, j_cube, k_cube=None): - r""" + r"""Calculate the 2 or 3-dimensional spherical or cartesian curl. + Calculate the 2-dimensional or 3-dimensional spherical or cartesian curl of the given vector of cubes. @@ -473,20 +475,21 @@ def curl(i_cube, j_cube, k_cube=None): cube to possess a vertical or z-like coordinate (representing some form of height or pressure). This can be a scalar or dimension coordinate. 
- Args: - - * i_cube + Parameters + ---------- + i_cube : The i cube of the vector to operate on - * j_cube + j_cube : The j cube of the vector to operate on - - Kwargs: - - * k_cube + k_cube : optional The k cube of the vector to operate on - Return (i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube) + Returns + ------- + List of cubes i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube + Notes + ----- If the k-cube is not passed in then the 2-dimensional curl will be calculated, yielding the result: [None, None, k_cube]. If the k-cube is passed in, the 3-dimensional curl will @@ -501,51 +504,48 @@ def curl(i_cube, j_cube, k_cube=None): GeogCS or RotatedGeogCS, the spherical curl will be calculated; otherwise the cartesian curl will be calculated: - Cartesian curl - - When cartesian calculus is used, i_cube is the u component, - j_cube is the v component and k_cube is the w component. + * Cartesian curl + * When cartesian calculus is used, i_cube is the u component, + j_cube is the v component and k_cube is the w component. - The Cartesian curl is defined as: + The Cartesian curl is defined as: - .. math:: + .. math:: - \nabla\times \vec u = - (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z})\vec a_i - - - (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j - + - (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k + \nabla\times \vec u = + (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z})\vec a_i + - + (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j + + + (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k - Spherical curl + * Spherical curl + * When spherical calculus is used, i_cube is the :math:`\phi` vector + component (e.g. eastward), j_cube is the :math:`\theta` component + (e.g. northward) and k_cube is the radial component. - When spherical calculus is used, i_cube is the :math:`\phi` vector - component (e.g. eastward), j_cube is the :math:`\theta` component - (e.g. 
northward) and k_cube is the radial component. + The spherical curl is defined as: - The spherical curl is defined as: + .. math:: - .. math:: + \nabla\times \vec A = \frac{1}{r cos \theta} + (\frac{\delta}{\delta \theta} + (\vec A_\phi cos \theta) - + \frac{\delta \vec A_\theta}{\delta \phi}) \vec r + + \frac{1}{r}(\frac{1}{cos \theta} + \frac{\delta \vec A_r}{\delta \phi} - + \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta + + \frac{1}{r} + (\frac{\delta}{\delta r}(r \vec A_\theta) - + \frac{\delta \vec A_r}{\delta \theta}) \vec \phi - \nabla\times \vec A = \frac{1}{r cos \theta} - (\frac{\delta}{\delta \theta} - (\vec A_\phi cos \theta) - - \frac{\delta \vec A_\theta}{\delta \phi}) \vec r + - \frac{1}{r}(\frac{1}{cos \theta} - \frac{\delta \vec A_r}{\delta \phi} - - \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta + - \frac{1}{r} - (\frac{\delta}{\delta r}(r \vec A_\theta) - - \frac{\delta \vec A_r}{\delta \theta}) \vec \phi - - where phi is longitude, theta is latitude. + where phi is longitude, theta is latitude. .. note:: This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. - """ # Get the vector quantity names. # (i.e. ['easterly', 'northerly', 'vertical']) @@ -735,7 +735,8 @@ def curl(i_cube, j_cube, k_cube=None): def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): - """ + """Given spatially dependent cubes, return a list of the spatial coordinate names. + Given 2 or 3 spatially dependent cubes, return a list of the spatial coordinate names with appropriate phenomenon name. @@ -750,7 +751,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): (['u', 'v', 'w'], 'wind') Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. 
diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index e5a704bb56..f447537b7d 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Definitions of derived coordinates. - -""" +"""Definitions of derived coordinates.""" from abc import ABCMeta, abstractmethod import warnings @@ -20,7 +17,8 @@ class AuxCoordFactory(CFVariableMixin, metaclass=ABCMeta): - """ + """Represents a "factory" which can manufacture additional auxiliary coordinate. + Represents a "factory" which can manufacture an additional auxiliary coordinate on demand, by combining the values of other coordinates. @@ -50,10 +48,7 @@ def __init__(self): @property def coord_system(self): - """ - The coordinate-system (if any) of the coordinate made by the factory. - - """ + """The coordinate-system (if any) of the coordinate made by the factory.""" return self._metadata_manager.coord_system @coord_system.setter @@ -62,7 +57,8 @@ def coord_system(self, value): @property def climatological(self): - """ + """Always returns False, as a factory itself can never have points/bounds. + Always returns False, as a factory itself can never have points/bounds and therefore can never be climatological by definition. @@ -72,37 +68,38 @@ def climatological(self): @property @abstractmethod def dependencies(self): - """ - Returns a dictionary mapping from constructor argument names to + """Return a dict mapping from constructor argument. + + Return a dictionary mapping from constructor argument names to the corresponding coordinates. """ @abstractmethod def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this - factory. - - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. 
- * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. + See :meth:`iris.cube.Cube.coord_dims()`. """ def update(self, old_coord, new_coord=None): - """ - Notifies the factory of the removal/replacement of a coordinate - which might be a dependency. + """Notify the factory of the removal/replacement of a coordinate. - Args: + Notify the factory of the removal/replacement of a coordinate + which might be a dependency. - * old_coord: + Parameters + ---------- + old_coord: The coordinate to be removed/replaced. - * new_coord: + new_coord: optional If None, any dependency using old_coord is removed, otherwise any dependency using old_coord is updated to use new_coord. @@ -130,19 +127,18 @@ def arg_text(item): return "<{}({})>".format(type(self).__name__, ", ".join(args)) def derived_dims(self, coord_dims_func): - """ - Returns the cube dimensions for the derived coordinate. - - Args: + """Return the cube dimensions for the derived coordinate. - * coord_dims_func: - A callable which can return the list of dimensions relevant - to a given coordinate. + Parameters + ---------- + coord_dims_func: + A callable which can return the list of dimensions relevant to a given + coordinate. See :meth:`iris.cube.Cube.coord_dims()`. - Returns: - - A sorted list of cube dimension numbers. + Returns + ------- + A sorted list of cube dimension numbers. """ # Which dimensions are relevant? @@ -158,13 +154,14 @@ def derived_dims(self, coord_dims_func): return derived_dims def updated(self, new_coord_mapping): - """ - Creates a new instance of this factory where the dependencies - are replaced according to the given mapping. + """Create a new instance of this factory. - Args: + Create a new instance of this factory where the dependencies + are replaced according to the given mapping. 
- * new_coord_mapping: + Parameters + ---------- + new_coord_mapping: A dictionary mapping from the object IDs potentially used by this factory, to the coordinate objects that should be used instead. @@ -178,10 +175,7 @@ def updated(self, new_coord_mapping): return type(self)(**new_dependencies) def xml_element(self, doc): - """ - Returns a DOM element describing this coordinate factory. - - """ + """Return a DOM element describing this coordinate factory.""" element = doc.createElement("coordFactory") for key, coord in self.dependencies.items(): element.setAttribute(key, coord._xml_id()) @@ -197,8 +191,7 @@ def _dependency_dims(self, coord_dims_func): @staticmethod def _nd_bounds(coord, dims, ndim): - """ - Return a lazy bounds array for a dependency coordinate, 'coord'. + """Return a lazy bounds array for a dependency coordinate, 'coord'. The result is aligned to the first 'ndim' cube dimensions, and expanded to the full ('ndim'+1)-dimensional shape. @@ -208,11 +201,12 @@ def _nd_bounds(coord, dims, ndim): The extra final result dimension ('ndim'-th) is the bounds dimension. - Example: + Example:: coord.shape == (70,) coord.nbounds = 2 dims == [3] ndim == 5 + results in: nd_bounds.shape == (1, 1, 1, 70, 1, 2) @@ -233,8 +227,7 @@ def _nd_bounds(coord, dims, ndim): @staticmethod def _nd_points(coord, dims, ndim): - """ - Return a lazy points array for a dependency coordinate, 'coord'. + """Return a lazy points array for a dependency coordinate, 'coord'. The result is aligned to the first 'ndim' cube dimensions, and expanded to the full 'ndim'-dimensional shape. @@ -242,11 +235,15 @@ def _nd_points(coord, dims, ndim): The value of 'ndim' must be >= the highest cube dimension of the dependency coordinate. 
- Example: + Examples + -------- + :: coord.shape == (4, 3) dims == [3, 2] ndim == 5 - results in: + + results in:: + nd_points.shape == (1, 1, 3, 4, 1) """ @@ -272,8 +269,7 @@ def _nd_points(coord, dims, ndim): return points def _remap(self, dependency_dims, derived_dims): - """ - Return a mapping from dependency names to coordinate points arrays. + """Return a mapping from dependency names to coordinate points arrays. For dependencies that are present, the values are all expanded and aligned to the same dimensions, which is the full set of all the @@ -311,8 +307,7 @@ def _remap(self, dependency_dims, derived_dims): return nd_points_by_key def _remap_with_bounds(self, dependency_dims, derived_dims): - """ - Return a mapping from dependency names to coordinate bounds arrays. + """Return a mapping from dependency names to coordinate bounds arrays. For dependencies that are present, the values are all expanded and aligned to the same dimensions, which is the full set of all the @@ -369,16 +364,19 @@ def _remap_with_bounds(self, dependency_dims, derived_dims): class AtmosphereSigmaFactory(AuxCoordFactory): - """Defines an atmosphere sigma coordinate factory with the formula: - p = ptop + sigma * (ps - ptop) + """Define an atmosphere sigma coordinate factory with the following formula. + + .. math:: + p = ptop + sigma * (ps - ptop) """ def __init__(self, pressure_at_top=None, sigma=None, surface_air_pressure=None): - """Creates an atmosphere sigma coordinate factory with the formula: + """Create an atmosphere sigma coordinate factory with a formula. - p(n, k, j, i) = pressure_at_top + sigma(k) * - (surface_air_pressure(n, j, i) - pressure_at_top) + .. math:: + p(n, k, j, i) = pressure_at_top + sigma(k) * + (surface_air_pressure(n, j, i) - pressure_at_top) """ # Configure the metadata manager. 
@@ -469,15 +467,14 @@ def _derive(pressure_at_top, sigma, surface_air_pressure): return pressure_at_top + sigma * (surface_air_pressure - pressure_at_top) def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this - factory. - - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. + See :meth:`iris.cube.Cube.coord_dims()`. """ @@ -537,26 +534,23 @@ def make_coord(self, coord_dims_func): class HybridHeightFactory(AuxCoordFactory): - """ - Defines a hybrid-height coordinate factory with the formula: - z = a + b * orog - - """ + """Defines a hybrid-height coordinate factory.""" def __init__(self, delta=None, sigma=None, orography=None): - """ - Creates a hybrid-height coordinate factory with the formula: + """Create a hybrid-height coordinate factory with the following formula. + + .. math:: z = a + b * orog At least one of `delta` or `orography` must be provided. - Args: - - * delta: Coord + Parameters + ---------- + delta: Coord, optional The coordinate providing the `a` term. - * sigma: Coord + sigma: Coord, optional The coordinate providing the `b` term. - * orography: Coord + orography: Coord, optional The coordinate providing the `orog` term. """ @@ -601,8 +595,9 @@ def __init__(self, delta=None, sigma=None, orography=None): @property def dependencies(self): - """ - Returns a dictionary mapping from constructor argument names to + """Return a dict mapping from constructor arg names to coordinates. + + Return a dictionary mapping from constructor argument names to the corresponding coordinates. """ @@ -616,15 +611,14 @@ def _derive(self, delta, sigma, orography): return delta + sigma * orography def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this - factory. 
- - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. + See :meth:`iris.cube.Cube.coord_dims()`. """ @@ -677,15 +671,16 @@ def make_coord(self, coord_dims_func): return hybrid_height def update(self, old_coord, new_coord=None): - """ - Notifies the factory of the removal/replacement of a coordinate - which might be a dependency. + """Notify the factory of the removal/replacement of a coordinate. - Args: + Notify the factory of the removal/replacement of a coordinate + which might be a dependency. - * old_coord: + Parameters + ---------- + old_coord: The coordinate to be removed/replaced. - * new_coord: + new_coord: optional If None, any dependency using old_coord is removed, otherwise any dependency using old_coord is updated to use new_coord. @@ -713,26 +708,23 @@ def update(self, old_coord, new_coord=None): class HybridPressureFactory(AuxCoordFactory): - """ - Defines a hybrid-pressure coordinate factory with the formula: - p = ap + b * ps - - """ + """Define a hybrid-pressure coordinate factory.""" def __init__(self, delta=None, sigma=None, surface_air_pressure=None): - """ - Creates a hybrid-height coordinate factory with the formula: + """Create a hybrid-height coordinate factory with the following formula. + + .. math:: p = ap + b * ps At least one of `delta` or `surface_air_pressure` must be provided. - Args: - - * delta: Coord + Parameters + ---------- + delta: Coord, optional The coordinate providing the `ap` term. - * sigma: Coord + sigma: Coord, optional The coordinate providing the `b` term. - * surface_air_pressure: Coord + surface_air_pressure: Coord, optional The coordinate providing the `ps` term. 
""" @@ -809,7 +801,8 @@ def _check_dependencies(delta, sigma, surface_air_pressure): @property def dependencies(self): - """ + """Return a dict mapping from constructor arg names to coordinates. + Returns a dictionary mapping from constructor argument names to the corresponding coordinates. @@ -824,15 +817,14 @@ def _derive(self, delta, sigma, surface_air_pressure): return delta + sigma * surface_air_pressure def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this - factory. + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - Args: - - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. + See :meth:`iris.cube.Cube.coord_dims()`. """ @@ -897,14 +889,17 @@ def __init__( nsigma=None, zlev=None, ): - """ - Creates an ocean sigma over z coordinate factory with the formula: + """Create an ocean sigma over z coordinate factory with the following formula. if k < nsigma: + + .. math:: z(n, k, j, i) = eta(n, j, i) + sigma(k) * - (min(depth_c, depth(j, i)) + eta(n, j, i)) + (min(depth_c, depth(j, i)) + eta(n, j, i)) if k >= nsigma: + + .. math:: z(n, k, j, i) = zlev(k) The `zlev` and 'nsigma' coordinates must be provided, and at least @@ -1011,7 +1006,8 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): @property def dependencies(self): - """ + """Return a dict mapping from constructor arg names to coordinates. + Returns a dictionary mapping from constructor argument names to the corresponding coordinates. @@ -1083,12 +1079,11 @@ def _derive(self, sigma, eta, depth, depth_c, zlev, nsigma, coord_dims_func): return result def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this factory. - - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. 
- * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1162,11 +1157,11 @@ class OceanSigmaFactory(AuxCoordFactory): """Defines an ocean sigma coordinate factory.""" def __init__(self, sigma=None, eta=None, depth=None): - """ - Creates an ocean sigma coordinate factory with the formula: + """Create an ocean sigma coordinate factory with the following formula. - z(n, k, j, i) = eta(n, j, i) + sigma(k) * - (depth(j, i) + eta(n, j, i)) + .. math:: + z(n, k, j, i) = eta(n, j, i) + sigma(k) * + (depth(j, i) + eta(n, j, i)) """ # Configure the metadata manager. @@ -1234,7 +1229,8 @@ def _check_dependencies(sigma, eta, depth): @property def dependencies(self): - """ + """Return a dict mapping from constructor arg names to coordinates. + Returns a dictionary mapping from constructor argument names to the corresponding coordinates. @@ -1245,12 +1241,11 @@ def _derive(self, sigma, eta, depth): return eta + sigma * (depth + eta) def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this factory. + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - Args: - - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1314,12 +1309,14 @@ class OceanSg1Factory(AuxCoordFactory): """Defines an Ocean s-coordinate, generic form 1 factory.""" def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): - """ - Creates an Ocean s-coordinate, generic form 1 factory with the formula: + """Create an Ocean s-coordinate, generic form 1 factory with the following formula. - z(n,k,j,i) = S(k,j,i) + eta(n,j,i) * (1 + S(k,j,i) / depth(j,i)) + .. math:: + z(n,k,j,i) = S(k,j,i) + eta(n,j,i) * (1 + S(k,j,i) / depth(j,i)) where: + + .. 
math:: S(k,j,i) = depth_c * s(k) + (depth(j,i) - depth_c) * C(k) """ @@ -1408,8 +1405,9 @@ def _check_dependencies(s, c, eta, depth, depth_c): @property def dependencies(self): - """ - Returns a dictionary mapping from constructor argument names to + """Return a dict mapping from constructor arg names to coordinates. + + Return a dictionary mapping from constructor argument names to the corresponding coordinates. """ @@ -1426,12 +1424,11 @@ def _derive(self, s, c, eta, depth, depth_c): return S + eta * (1 + S / depth) def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this factory. - - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1499,13 +1496,17 @@ class OceanSFactory(AuxCoordFactory): """Defines an Ocean s-coordinate factory.""" def __init__(self, s=None, eta=None, depth=None, a=None, b=None, depth_c=None): - """ - Creates an Ocean s-coordinate factory with the formula: + """Create an Ocean s-coordinate factory with a formula. + + .. math:: - z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) + - (depth(j,i)-depth_c)*C(k) + z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) + + (depth(j,i)-depth_c)*C(k) where: + + .. math:: + C(k) = (1-b) * sinh(a*s(k)) / sinh(a) + b * [tanh(a * (s(k) + 0.5)) / (2 * tanh(0.5*a)) - 0.5] @@ -1592,8 +1593,9 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): @property def dependencies(self): - """ - Returns a dictionary mapping from constructor argument names to + """Return a dict mapping from constructor arg names to coordinates. + + Return a dictionary mapping from constructor argument names to the corresponding coordinates. 
""" @@ -1613,12 +1615,11 @@ def _derive(self, s, eta, depth, a, b, depth_c): return eta * (1 + s) + depth_c * s + (depth - depth_c) * c def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this factory. - - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1688,12 +1689,14 @@ class OceanSg2Factory(AuxCoordFactory): """Defines an Ocean s-coordinate, generic form 2 factory.""" def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): - """ - Creates an Ocean s-coordinate, generic form 2 factory with the formula: + """Create an Ocean s-coordinate, generic form 2 factory with the following formula. - z(n,k,j,i) = eta(n,j,i) + (eta(n,j,i) + depth(j,i)) * S(k,j,i) + .. math:: + z(n,k,j,i) = eta(n,j,i) + (eta(n,j,i) + depth(j,i)) * S(k,j,i) where: + + .. math:: S(k,j,i) = (depth_c * s(k) + depth(j,i) * C(k)) / (depth_c + depth(j,i)) @@ -1783,7 +1786,8 @@ def _check_dependencies(s, c, eta, depth, depth_c): @property def dependencies(self): - """ + """Returns a dicti mapping from constructor arg names to coordinates. + Returns a dictionary mapping from constructor argument names to the corresponding coordinates. @@ -1801,12 +1805,11 @@ def _derive(self, s, c, eta, depth, depth_c): return eta + (eta + depth) * S def make_coord(self, coord_dims_func): - """ - Returns a new :class:`iris.coords.AuxCoord` as defined by this factory. - - Args: + """Return a new :class:`iris.coords.AuxCoord` as defined by this factory. - * coord_dims_func: + Parameters + ---------- + coord_dims_func: A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. 
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 391c4bb6f5..b944b51978 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -83,6 +83,7 @@ def decode_uri(uri, default="file"): In addition to well-formed URIs, it also supports bare file paths as strings or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are accepted. + It also supports 'bare objects', i.e. anything which is not a string. These are identified with a scheme of 'data', and returned unchanged. diff --git a/pyproject.toml b/pyproject.toml index 73e9bf0693..acbb0c9c4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,8 @@ ignore = [ preview = false select = [ "ALL", + # list specific rules to include that is skipped using numpy convention. + "D212", # Multi-line docstring summary should start at the first line ] [tool.ruff.lint.isort] From 0571b5517c61b146f7c6287c7071fe6816832aee Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Fri, 15 Dec 2023 09:36:39 +0000 Subject: [PATCH 115/134] Add sg_execution_times.rst to .gitignore [skip ci] (#5639) --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 42d02d8c71..1b132cbd38 100644 --- a/.gitignore +++ b/.gitignore @@ -65,6 +65,7 @@ lib/iris/tests/results/imagerepo.lock # Auto generated documentation files docs/src/_build/* docs/src/generated +docs/src/sg_execution_times.rst # Example test results docs/iris_image_test_output/ From 90b3c7da49b93a9ff24ef551510c9d2d6084b372 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Fri, 15 Dec 2023 15:24:42 +0000 Subject: [PATCH 116/134] Artifact upgrade (#5644) * Bump actions/upload-artifact from 3 to 4 Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 3 to 4. 
- [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Update benchmarks_report.yml * Update ci-wheels.yml * Kick CI. * Correct upload-artifact typo. * Remove extra line. --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Patrick Peglar --- .github/workflows/benchmarks_report.yml | 4 ++-- .github/workflows/benchmarks_run.yml | 4 ++-- .github/workflows/ci-wheels.yml | 10 +++++----- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index 45b857cc31..93a2bc1a77 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -54,7 +54,7 @@ jobs: echo "reports_exist=$reports_exist" >> "$GITHUB_OUTPUT" - name: Store artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: benchmark_reports path: benchmark_reports @@ -68,7 +68,7 @@ jobs: uses: actions/checkout@v4 - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: benchmark_reports path: .github/workflows/benchmark_reports diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index bcc18d62c4..5bc2fba6ec 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -113,14 +113,14 @@ jobs: - name: Upload any benchmark reports if: success() || steps.overnight.outcome == 'failure' - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: benchmark_reports path: .github/workflows/benchmark_reports - name: Archive asv results if: ${{ always() }} - uses: 
actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: asv-raw-results path: benchmarks/.asv/results diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index 450a18eb86..35dda2e0a6 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -37,7 +37,7 @@ jobs: run: | pipx run build - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: pypi-artifacts path: ${{ github.workspace }}/dist/* @@ -61,7 +61,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -116,7 +116,7 @@ jobs: name: "show artifacts" runs-on: ubuntu-latest steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -133,7 +133,7 @@ jobs: # and check for the SciTools repo if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' && github.repository_owner == 'SciTools' steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -153,7 +153,7 @@ jobs: # upload to PyPI for every tag starting with 'v' if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') && github.repository_owner == 'SciTools' steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: pypi-artifacts path: ${{ github.workspace }}/dist From d0ee9c26454f4498411e0fd6bd7fbaba2d1bcc48 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Fri, 15 Dec 2023 16:01:30 +0000 Subject: [PATCH 117/134] DOCS: Enabled all pydocstyle (numpy) ruff rules with a list of specific exceptions. (#5636) * Enabled all ruff rules with a list of specific exceptions. 
* Removed trailing line * changed text * added another ignore * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix for doctest ci * amended sentence structure * fixed note issue. * fix doctests --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Bill Little --- .ruff.toml | 30 +- benchmarks/asv_delegated_conda.py | 9 +- benchmarks/benchmarks/__init__.py | 12 +- benchmarks/benchmarks/aux_factory.py | 6 +- benchmarks/benchmarks/coords.py | 7 +- benchmarks/benchmarks/cperf/__init__.py | 3 +- benchmarks/benchmarks/cperf/equality.py | 11 +- benchmarks/benchmarks/cperf/load.py | 10 +- benchmarks/benchmarks/cperf/save.py | 6 +- benchmarks/benchmarks/cube.py | 6 +- .../benchmarks/experimental/__init__.py | 3 +- .../benchmarks/experimental/ugrid/__init__.py | 6 +- .../experimental/ugrid/regions_combine.py | 22 +- .../benchmarks/generate_data/__init__.py | 9 +- benchmarks/benchmarks/generate_data/stock.py | 12 +- benchmarks/benchmarks/generate_data/ugrid.py | 15 +- .../benchmarks/generate_data/um_files.py | 12 +- benchmarks/benchmarks/import_iris.py | 3 +- benchmarks/benchmarks/iterate.py | 3 +- benchmarks/benchmarks/load/__init__.py | 6 +- benchmarks/benchmarks/load/ugrid.py | 3 +- .../benchmarks/metadata_manager_factory.py | 3 +- benchmarks/benchmarks/mixin.py | 3 +- benchmarks/benchmarks/plot.py | 3 +- benchmarks/benchmarks/regridding.py | 3 +- benchmarks/benchmarks/save.py | 3 +- benchmarks/benchmarks/sperf/__init__.py | 3 +- .../benchmarks/sperf/combine_regions.py | 22 +- benchmarks/benchmarks/sperf/equality.py | 6 +- benchmarks/benchmarks/sperf/load.py | 3 +- benchmarks/benchmarks/sperf/save.py | 8 +- benchmarks/benchmarks/trajectory.py | 3 +- benchmarks/bm_runner.py | 20 +- .../general/plot_SOI_filtering.py | 3 +- .../general/plot_anomaly_log_colouring.py | 3 +- docs/gallery_code/general/plot_coriolis.py | 3 +- .../general/plot_cross_section.py | 3 
+- .../general/plot_custom_aggregation.py | 6 +- .../general/plot_custom_file_loading.py | 10 +- docs/gallery_code/general/plot_global_map.py | 3 +- docs/gallery_code/general/plot_inset.py | 3 +- .../general/plot_lineplot_with_legend.py | 3 +- .../gallery_code/general/plot_polar_stereo.py | 3 +- .../general/plot_polynomial_fit.py | 3 +- .../plot_projections_and_annotations.py | 3 +- .../general/plot_rotated_pole_mapping.py | 3 +- docs/gallery_code/general/plot_zonal_means.py | 3 +- docs/gallery_code/meteorology/plot_COP_1d.py | 3 +- .../gallery_code/meteorology/plot_COP_maps.py | 6 +- docs/gallery_code/meteorology/plot_TEC.py | 3 +- .../meteorology/plot_deriving_phenomena.py | 6 +- .../meteorology/plot_hovmoller.py | 3 +- .../meteorology/plot_lagged_ensemble.py | 6 +- .../meteorology/plot_wind_barbs.py | 3 +- .../meteorology/plot_wind_speed.py | 3 +- .../oceanography/plot_atlantic_profiles.py | 3 +- .../oceanography/plot_load_nemo.py | 3 +- .../oceanography/plot_orca_projection.py | 3 +- docs/gallery_tests/conftest.py | 9 +- .../documenting/docstrings_attribute.py | 11 +- .../documenting/docstrings_sample_routine.py | 3 +- .../src/further_topics/filtering_warnings.rst | 12 +- lib/iris/_concatenate.py | 101 ++---- lib/iris/_data_manager.py | 69 ++-- lib/iris/_deprecation.py | 9 +- lib/iris/_lazy_data.py | 35 +- lib/iris/_merge.py | 114 +++---- lib/iris/_representation/__init__.py | 3 +- lib/iris/_representation/cube_printout.py | 21 +- lib/iris/_representation/cube_summary.py | 12 +- lib/iris/analysis/_area_weighted.py | 35 +- lib/iris/analysis/_grid_angles.py | 18 +- lib/iris/analysis/_interpolation.py | 50 +-- lib/iris/analysis/_regrid.py | 42 +-- lib/iris/analysis/_scipy_interpolate.py | 17 +- lib/iris/analysis/cartography.py | 48 +-- lib/iris/analysis/geometry.py | 9 +- lib/iris/analysis/maths.py | 67 ++-- lib/iris/analysis/stats.py | 6 +- lib/iris/analysis/trajectory.py | 30 +- lib/iris/common/__init__.py | 3 +- lib/iris/common/_split_attribute_dicts.py | 12 +- 
lib/iris/common/lenient.py | 48 +-- lib/iris/common/metadata.py | 111 ++----- lib/iris/common/mixin.py | 9 +- lib/iris/common/resolve.py | 84 ++--- lib/iris/coord_categorisation.py | 30 +- lib/iris/coord_systems.py | 144 +++------ lib/iris/coords.py | 233 +++++--------- lib/iris/cube.py | 298 ++++++------------ lib/iris/exceptions.py | 84 ++--- lib/iris/experimental/__init__.py | 3 +- lib/iris/experimental/animate.py | 6 +- lib/iris/experimental/raster.py | 9 +- lib/iris/experimental/regrid.py | 52 +-- lib/iris/experimental/regrid_conservative.py | 12 +- lib/iris/experimental/representation.py | 18 +- lib/iris/experimental/stratify.py | 9 +- lib/iris/experimental/ugrid/__init__.py | 3 +- lib/iris/experimental/ugrid/cf.py | 21 +- lib/iris/experimental/ugrid/load.py | 30 +- lib/iris/experimental/ugrid/mesh.py | 168 ++++------ lib/iris/experimental/ugrid/metadata.py | 44 +-- lib/iris/experimental/ugrid/save.py | 6 +- lib/iris/experimental/ugrid/utils.py | 6 +- lib/iris/fileformats/__init__.py | 3 +- lib/iris/fileformats/_ff.py | 65 ++-- .../fileformats/_nc_load_rules/__init__.py | 3 +- .../fileformats/_nc_load_rules/actions.py | 6 +- lib/iris/fileformats/_nc_load_rules/engine.py | 27 +- .../fileformats/_nc_load_rules/helpers.py | 64 ++-- .../_structured_array_identification.py | 40 +-- lib/iris/fileformats/abf.py | 12 +- lib/iris/fileformats/cf.py | 74 ++--- lib/iris/fileformats/dot.py | 21 +- lib/iris/fileformats/name.py | 6 +- lib/iris/fileformats/name_loaders.py | 45 +-- lib/iris/fileformats/netcdf/__init__.py | 3 +- .../fileformats/netcdf/_thread_safe_nc.py | 57 ++-- lib/iris/fileformats/nimrod.py | 12 +- lib/iris/fileformats/nimrod_load_rules.py | 38 +-- lib/iris/fileformats/pp.py | 111 +++---- lib/iris/fileformats/pp_load_rules.py | 36 +-- lib/iris/fileformats/pp_save_rules.py | 27 +- lib/iris/fileformats/rules.py | 18 +- lib/iris/fileformats/um/__init__.py | 3 +- lib/iris/fileformats/um/_fast_load.py | 18 +- .../um/_fast_load_structured_fields.py | 21 +- 
lib/iris/fileformats/um/_ff_replacement.py | 12 +- .../um/_optimal_array_structuring.py | 6 +- lib/iris/io/format_picker.py | 30 +- lib/iris/iterate.py | 12 +- lib/iris/palette.py | 25 +- lib/iris/pandas.py | 25 +- lib/iris/plot.py | 81 ++--- lib/iris/quickplot.py | 33 +- lib/iris/symbols.py | 6 +- lib/iris/tests/__init__.py | 69 ++-- lib/iris/tests/experimental/__init__.py | 3 +- .../tests/experimental/regrid/__init__.py | 3 +- ..._area_weighted_rectilinear_src_and_grid.py | 24 +- .../test_regrid_conservative_via_esmpy.py | 27 +- lib/iris/tests/experimental/test_raster.py | 3 +- lib/iris/tests/graphics/__init__.py | 9 +- lib/iris/tests/graphics/idiff.py | 7 +- lib/iris/tests/graphics/recreate_imagerepo.py | 3 +- .../aux_factory/test_OceanSigmaZFactory.py | 3 +- .../concatenate/test_concatenate.py | 3 +- .../experimental/test_ugrid_load.py | 3 +- .../experimental/test_ugrid_save.py | 3 +- .../tests/integration/merge/test_merge.py | 3 +- .../integration/netcdf/test__dask_locks.py | 3 +- .../integration/netcdf/test_coord_systems.py | 3 +- .../integration/netcdf/test_delayed_save.py | 6 +- .../integration/netcdf/test_thread_safety.py | 9 +- .../tests/integration/plot/test_animate.py | 3 +- .../tests/integration/plot/test_colorbar.py | 3 +- .../tests/integration/plot/test_netcdftime.py | 3 +- .../tests/integration/plot/test_nzdateline.py | 3 +- .../integration/plot/test_plot_2d_coords.py | 3 +- .../integration/plot/test_vector_plots.py | 6 +- .../integration/test_netcdf__loadsaveattrs.py | 41 +-- .../integration/test_regrid_equivalence.py | 3 +- .../tests/integration/um/test_fieldsfile.py | 3 +- lib/iris/tests/pp.py | 8 +- lib/iris/tests/stock/__init__.py | 48 +-- lib/iris/tests/stock/_stock_2d_latlons.py | 15 +- lib/iris/tests/stock/mesh.py | 9 +- lib/iris/tests/stock/netcdf.py | 3 +- lib/iris/tests/system_test.py | 3 +- lib/iris/tests/test_analysis_calculus.py | 5 +- lib/iris/tests/test_cartography.py | 3 +- lib/iris/tests/test_cdm.py | 3 +- lib/iris/tests/test_cf.py 
| 3 +- lib/iris/tests/test_coding_standards.py | 12 +- lib/iris/tests/test_concatenate.py | 12 +- lib/iris/tests/test_constraints.py | 8 +- lib/iris/tests/test_cube_to_pp.py | 3 +- lib/iris/tests/test_ff.py | 3 +- lib/iris/tests/test_file_load.py | 6 +- lib/iris/tests/test_file_save.py | 3 +- lib/iris/tests/test_hybrid.py | 3 +- lib/iris/tests/test_intersect.py | 3 +- lib/iris/tests/test_io_init.py | 3 +- lib/iris/tests/test_iterate.py | 3 +- lib/iris/tests/test_load.py | 3 +- lib/iris/tests/test_mapping.py | 3 +- lib/iris/tests/test_merge.py | 14 +- lib/iris/tests/test_netcdf.py | 6 +- lib/iris/tests/test_pickling.py | 8 +- lib/iris/tests/test_plot.py | 15 +- lib/iris/tests/test_pp_module.py | 3 +- lib/iris/tests/test_quickplot.py | 3 +- lib/iris/tests/test_std_names.py | 3 +- lib/iris/tests/test_util.py | 3 +- .../test_AreaWeightedRegridder.py | 3 +- .../cartography/test_gridcell_angles.py | 6 +- .../cartography/test_rotate_grid_vectors.py | 3 +- .../analysis/cartography/test_rotate_winds.py | 6 +- .../test__extract_relevant_cube_slice.py | 3 +- .../geometry/test_geometry_area_weights.py | 3 +- .../test_RectilinearInterpolator.py | 24 +- .../interpolation/test_get_xy_dim_coords.py | 3 +- .../maths/test__arith__derived_coords.py | 3 +- .../analysis/maths/test__arith__meshcoords.py | 3 +- .../unit/analysis/maths/test__get_dtype.py | 3 +- .../maths/test__inplace_common_checks.py | 3 +- .../unit/analysis/maths/test__output_dtype.py | 3 +- lib/iris/tests/unit/analysis/test_MAX_RUN.py | 6 +- .../tests/unit/analysis/test_PERCENTILE.py | 9 +- .../analysis/test_PercentileAggregator.py | 3 +- .../test_WeightedPercentileAggregator.py | 3 +- .../analysis/test__axis_to_single_trailing.py | 3 +- .../analysis/trajectory/test_Trajectory.py | 3 +- ...t_UnstructuredNearestNeighbourRegridder.py | 3 +- ...est__nearest_neighbour_indices_ndcoords.py | 3 +- .../analysis/trajectory/test_interpolate.py | 3 +- .../test_AtmosphereSigmaFactory.py | 3 +- 
.../unit/aux_factory/test_AuxCoordFactory.py | 3 +- .../aux_factory/test_HybridPressureFactory.py | 3 +- .../unit/aux_factory/test_OceanSFactory.py | 3 +- .../unit/aux_factory/test_OceanSg1Factory.py | 3 +- .../unit/aux_factory/test_OceanSg2Factory.py | 3 +- .../aux_factory/test_OceanSigmaFactory.py | 3 +- .../aux_factory/test_OceanSigmaZFactory.py | 3 +- .../tests/unit/common/lenient/test_Lenient.py | 3 +- .../unit/common/lenient/test__Lenient.py | 3 +- .../common/lenient/test__lenient_client.py | 3 +- .../common/lenient/test__lenient_service.py | 3 +- .../unit/common/lenient/test__qualname.py | 3 +- .../test_AncillaryVariableMetadata.py | 3 +- .../unit/common/metadata/test_BaseMetadata.py | 3 +- .../metadata/test_CellMeasureMetadata.py | 3 +- .../common/metadata/test_CoordMetadata.py | 3 +- .../unit/common/metadata/test_CubeMetadata.py | 27 +- .../common/metadata/test__NamedTupleMeta.py | 3 +- .../unit/common/metadata/test_hexdigest.py | 3 +- .../common/metadata/test_metadata_filter.py | 3 +- .../metadata/test_metadata_manager_factory.py | 3 +- .../unit/common/mixin/test_CFVariableMixin.py | 3 +- .../common/mixin/test_LimitedAttributeDict.py | 3 +- .../mixin/test__get_valid_standard_name.py | 3 +- .../tests/unit/common/resolve/test_Resolve.py | 3 +- .../coord_categorisation/test_add_hour.py | 3 +- .../test_coord_categorisation.py | 3 +- .../coord_systems/test_AlbersEqualArea.py | 3 +- .../test_LambertAzimuthalEqualArea.py | 3 +- lib/iris/tests/unit/coords/__init__.py | 3 +- lib/iris/tests/unit/coords/test_AuxCoord.py | 3 +- lib/iris/tests/unit/coords/test_Cell.py | 6 +- lib/iris/tests/unit/coords/test_CellMethod.py | 3 +- lib/iris/tests/unit/coords/test_DimCoord.py | 3 +- .../unit/coords/test__DimensionalMetadata.py | 25 +- lib/iris/tests/unit/cube/test_Cube.py | 44 +-- .../tests/unit/cube/test_CubeAttrsDict.py | 3 +- lib/iris/tests/unit/cube/test_CubeList.py | 10 +- .../unit/cube/test_Cube__aggregated_by.py | 32 +- .../unit/data_manager/test_DataManager.py | 3 
+- ..._area_weighted_rectilinear_src_and_grid.py | 3 +- ...rid_weighted_curvilinear_to_rectilinear.py | 3 +- .../experimental/stratify/test_relevel.py | 3 +- ...test_CFUGridAuxiliaryCoordinateVariable.py | 3 +- .../cf/test_CFUGridConnectivityVariable.py | 3 +- .../ugrid/cf/test_CFUGridGroup.py | 3 +- .../ugrid/cf/test_CFUGridMeshVariable.py | 3 +- .../ugrid/cf/test_CFUGridReader.py | 3 +- .../ugrid/load/test_ParseUgridOnLoad.py | 3 +- .../experimental/ugrid/load/test_load_mesh.py | 3 +- .../ugrid/load/test_load_meshes.py | 3 +- .../unit/experimental/ugrid/mesh/test_Mesh.py | 3 +- .../experimental/ugrid/mesh/test_MeshCoord.py | 9 +- .../ugrid/mesh/test_Mesh__from_coords.py | 3 +- .../metadata/test_ConnectivityMetadata.py | 3 +- .../ugrid/metadata/test_MeshCoordMetadata.py | 3 +- .../ugrid/metadata/test_MeshMetadata.py | 3 +- .../ugrid/utils/test_recombine_submeshes.py | 3 +- lib/iris/tests/unit/fileformats/__init__.py | 3 +- .../unit/fileformats/cf/test_CFReader.py | 3 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 3 +- .../name_loaders/test__build_cell_methods.py | 3 +- ...test__build_lat_lon_for_NAME_timeseries.py | 3 +- .../test__calc_integration_period.py | 3 +- .../name_loaders/test__cf_height_from_name.py | 3 +- .../name_loaders/test__generate_cubes.py | 3 +- .../fileformats/nc_load_rules/__init__.py | 3 +- .../nc_load_rules/actions/__init__.py | 12 +- .../actions/test__grid_mappings.py | 9 +- .../actions/test__hybrid_formulae.py | 3 +- .../actions/test__latlon_dimcoords.py | 3 +- .../actions/test__miscellaneous.py | 3 +- .../actions/test__time_coords.py | 14 +- .../nc_load_rules/engine/__init__.py | 3 +- .../nc_load_rules/engine/test_engine.py | 3 +- .../nc_load_rules/helpers/__init__.py | 3 +- ...ild_albers_equal_area_coordinate_system.py | 3 +- .../helpers/test_build_ancil_var.py | 3 +- .../test_build_auxiliary_coordinate.py | 3 +- .../helpers/test_build_cell_measure.py | 3 +- .../helpers/test_build_cube_metadata.py | 3 +- 
.../test_build_dimension_coordinate.py | 3 +- ...t_build_geostationary_coordinate_system.py | 6 +- ..._azimuthal_equal_area_coordinate_system.py | 3 +- ...ild_lambert_conformal_coordinate_system.py | 3 +- .../test_build_mercator_coordinate_system.py | 3 +- ...uild_oblique_mercator_coordinate_system.py | 3 +- ...d_polar_stereographic_coordinate_system.py | 3 +- ...t_build_stereographic_coordinate_system.py | 3 +- ...d_transverse_mercator_coordinate_system.py | 3 +- .../test_build_verticalp_coordinate_system.py | 6 +- .../helpers/test_get_attr_units.py | 3 +- .../helpers/test_get_cf_bounds_var.py | 3 +- .../nc_load_rules/helpers/test_get_names.py | 6 +- .../test_has_supported_mercator_parameters.py | 3 +- ...upported_polar_stereographic_parameters.py | 3 +- .../helpers/test_parse_cell_methods.py | 3 +- .../helpers/test_reorder_bounds_data.py | 3 +- .../netcdf/loader/test__chunk_control.py | 3 +- ...__translate_constraints_to_var_callback.py | 3 +- .../netcdf/loader/test_load_cubes.py | 3 +- .../fileformats/netcdf/saver/test_Saver.py | 6 +- .../saver/test_Saver__lazy_stream_data.py | 6 +- .../netcdf/saver/test_Saver__ugrid.py | 34 +- .../saver/test__data_fillvalue_check.py | 3 +- .../netcdf/saver/test__fillvalue_report.py | 3 +- .../fileformats/netcdf/saver/test_save.py | 9 +- .../nimrod_load_rules/test_units.py | 3 +- .../nimrod_load_rules/test_vertical_coord.py | 3 +- .../pp/test__data_bytes_to_shaped_array.py | 3 +- .../tests/unit/fileformats/pp/test_save.py | 8 +- .../pp_load_rules/test__all_other_rules.py | 3 +- ...__collapse_degenerate_points_and_bounds.py | 3 +- ...est__convert_scalar_pseudo_level_coords.py | 3 +- ...test__convert_scalar_realization_coords.py | 3 +- .../test__convert_time_coords.py | 3 +- .../test__convert_vertical_coords.py | 3 +- .../pp_load_rules/test__epoch_date_hours.py | 3 +- .../pp_load_rules/test__model_level_number.py | 3 +- .../test__reduced_points_and_bounds.py | 3 +- .../test__reshape_vector_args.py | 3 +- .../__init__.py | 3 +- 
.../test_ArrayStructure.py | 6 +- .../test_GroupStructure.py | 3 +- lib/iris/tests/unit/fileformats/test_rules.py | 3 +- .../unit/fileformats/um/fast_load/__init__.py | 3 +- .../um/fast_load/test_FieldCollation.py | 3 +- .../fast_load_structured_fields/__init__.py | 3 +- .../test_BasicFieldCollation.py | 3 +- .../test_group_structured_fields.py | 6 +- .../um/optimal_array_structuring/__init__.py | 3 +- .../test_optimal_array_structure.py | 3 +- .../unit/fileformats/um/test_um_to_pp.py | 3 +- lib/iris/tests/unit/plot/__init__.py | 3 +- lib/iris/tests/unit/plot/test_plot.py | 9 +- .../tests/unit/util/test__coord_regular.py | 3 +- lib/iris/tests/unit/util/test__mask_array.py | 17 +- .../unit/util/test__slice_data_with_keys.py | 3 +- .../unit/util/test_equalise_attributes.py | 6 +- .../unit/util/test_file_is_newer_than.py | 3 +- lib/iris/tests/unit/util/test_mask_cube.py | 3 +- lib/iris/util.py | 108 +++---- noxfile.py | 27 +- setup.py | 3 +- 362 files changed, 1586 insertions(+), 3268 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index afe5941a14..e09d03c2d4 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -23,14 +23,37 @@ lint.ignore = [ # pydocstyle (D) # https://docs.astral.sh/ruff/rules/#pydocstyle-d - "D", - # Permanent + # (D-1) Permanent "D105", # Missing docstring in magic method - # Temporary, to be removed when we are more compliant + # (D-2) Temporary, to be removed when we are more compliant. Rare cases mmove to (1). "D417", # Missing argument descriptions in the docstring "D101", # Missing docstring in public class "D102", # Missing docstring in public method + "D104", # Missing docstring in public package "D106", # Missing docstring in public nested class + # (D-3) Temporary, before an initial review, either fix ocurrenvces or move to (2). 
+ "D100", # Missing docstring in public module + "D103", # Missing docstring in public function + "D200", # One-line docstring should fit on one line + "D202", # No blank lines allowed after function docstring + "D205", # 1 blank line required between summary line and description + "D208", # Docstring is over-indented + "D209", # Multi-line docstring closing quotes should be on a separate line + "D211", # No blank lines allowed before class docstring + "D214", # Section is over-indented + "D300", # triple double quotes `""" / Use triple single quotes `'''` + "D301", # Use `r"""` if any backslashes in a docstring + "D400", # First line should end with a period + "D401", # First line of docstring should be in imperative mood: ... + "D403", # First word of the first line should be capitalized + "D404", # First word of the docstring should not be "This" + "D405", # Section name should be properly capitalized + "D406", # Section name should end with a newline + "D407", # Missing dashed underline after section + "D409", # Section underline should match the length of its name + "D410", # Missing blank line after section + "D411", # Missing blank line before section + "D412", # No blank lines allowed between a section header and its content # pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up @@ -240,4 +263,3 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf "RUF", ] - diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index 8851c21108..b0bdd6b64b 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` +"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` subclass that manages the Conda environment via custom user scripts. """ @@ -22,8 +21,7 @@ class CondaDelegated(Conda): - """ - Manage a Conda environment using custom user scripts, run at each commit. + """Manage a Conda environment using custom user scripts, run at each commit. Ignores user input variations - ``matrix`` / ``pythons`` / ``conda_environment_file``, since environment is being managed outside ASV. @@ -44,8 +42,7 @@ def __init__( requirements: dict, tagged_env_vars: dict, ) -> None: - """ - Parameters + """Parameters ---------- conf : Config instance diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 61983a969f..14edb2eda7 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -10,8 +10,7 @@ def disable_repeat_between_setup(benchmark_object): - """ - Decorator for benchmarks where object persistence would be inappropriate. + """Decorator for benchmarks where object persistence would be inappropriate. E.g: * Benchmarking data realisation @@ -37,8 +36,7 @@ def disable_repeat_between_setup(benchmark_object): class TrackAddedMemoryAllocation: - """ - Context manager which measures by how much process resident memory grew, + """Context manager which measures by how much process resident memory grew, during execution of its enclosed code block. Obviously limited as to what it actually measures : Relies on the current @@ -86,8 +84,7 @@ def addedmem_mb(self): @staticmethod def decorator(decorated_func): - """ - Decorates this benchmark to track growth in resident memory during execution. + """Decorates this benchmark to track growth in resident memory during execution. Intended for use on ASV ``track_`` benchmarks. 
Applies the :class:`TrackAddedMemoryAllocation` context manager to the benchmark @@ -108,8 +105,7 @@ def _wrapper(*args, **kwargs): def on_demand_benchmark(benchmark_object): - """ - Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. + """Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. For benchmarks that, for whatever reason, should not be run by default. E.g: diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index 7d1e266c78..49dc6a368c 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -AuxFactory benchmark tests. +"""AuxFactory benchmark tests. """ @@ -20,8 +19,7 @@ class FactoryCommon: # * remove NotImplementedError # * combine setup_common into setup - """ - A base class running a generalised suite of benchmarks for any factory. + """A base class running a generalised suite of benchmarks for any factory. Factory to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index b6f56b997f..1c8f49967a 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Coord benchmark tests. +"""Coord benchmark tests. """ @@ -26,9 +25,7 @@ class CoordCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """ - - A base class running a generalised suite of benchmarks for any coord. + """A base class running a generalised suite of benchmarks for any coord. Coord to be specified in a subclass. 
ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/cperf/__init__.py b/benchmarks/benchmarks/cperf/__init__.py index 7adba01b60..eaff9cf5e0 100644 --- a/benchmarks/benchmarks/cperf/__init__.py +++ b/benchmarks/benchmarks/cperf/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +"""Benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. CPerf = comparing performance working with data in UM versus LFRic formats. diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index ea05cd6bf6..9a3ceded9f 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -2,16 +2,14 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +"""Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ from .. import on_demand_benchmark from . import SingleDiagnosticMixin class EqualityMixin(SingleDiagnosticMixin): - """ - Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing + """Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing :class:`~iris.cube.Cube`\\ s that have been loaded from file. """ @@ -26,9 +24,8 @@ def setup(self, file_type, three_d=False, three_times=False): @on_demand_benchmark class CubeEquality(EqualityMixin): - """ - Benchmark time and memory costs of comparing LFRic and UM - :class:`~iris.cube.Cube`\\ s. + """Benchmark time and memory costs of comparing LFRic and UM + :class:`~iris.cube.Cube`\\ s. 
""" def _comparison(self): diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py index e67c095973..2d3c0b5c6b 100644 --- a/benchmarks/benchmarks/cperf/load.py +++ b/benchmarks/benchmarks/cperf/load.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +"""File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ from .. import on_demand_benchmark from . import SingleDiagnosticMixin @@ -12,10 +11,9 @@ @on_demand_benchmark class SingleDiagnosticLoad(SingleDiagnosticMixin): def time_load(self, _, __, ___): - """ - The 'real world comparison' - * UM coords are always realised (DimCoords). - * LFRic coords are not realised by default (MeshCoords). + """The 'real world comparison' + * UM coords are always realised (DimCoords). + * LFRic coords are not realised by default (MeshCoords). """ cube = self.load() diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index 95814df53a..528f878265 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +"""File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ from iris import save @@ -15,8 +14,7 @@ @on_demand_benchmark class NetcdfSave: - """ - Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. + """Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. Parametrised by file type. 
""" diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index 57aec690e0..f11e135996 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Cube benchmark tests. +"""Cube benchmark tests. """ @@ -32,8 +31,7 @@ class ComponentCommon: # * remove NotImplementedError # * combine setup_common into setup - """ - A base class running a generalised suite of benchmarks for cubes that + """A base class running a generalised suite of benchmarks for cubes that include a specified component (e.g. Coord, CellMeasure etc.). Component to be specified in a subclass. diff --git a/benchmarks/benchmarks/experimental/__init__.py b/benchmarks/benchmarks/experimental/__init__.py index 81fb222916..d1f34cdb15 100644 --- a/benchmarks/benchmarks/experimental/__init__.py +++ b/benchmarks/benchmarks/experimental/__init__.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Benchmark tests for the experimental module. +"""Benchmark tests for the experimental module. """ diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index 2ce7ba4623..add9b0d37c 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Benchmark tests for the experimental.ugrid module. +"""Benchmark tests for the experimental.ugrid module. """ @@ -18,8 +17,7 @@ class UGridCommon: - """ - A base class running a generalised suite of benchmarks for any ugrid object. 
+ """A base class running a generalised suite of benchmarks for any ugrid object. Object to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 04b5933e70..a4df9aa8c7 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Benchmarks stages of operation of the function +"""Benchmarks stages of operation of the function :func:`iris.experimental.ugrid.utils.recombine_submeshes`. Where possible benchmarks should be parameterised for two sizes of input data: @@ -90,8 +89,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """ - The combine-tests "standard" setup operation. + """The combine-tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. @@ -139,8 +137,7 @@ def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): self.fix_dask_settings() def fix_dask_settings(self): - """ - Fix "standard" dask behaviour for time+space testing. + """Fix "standard" dask behaviour for time+space testing. Currently this is single-threaded mode, with known chunksize, which is optimised for space saving so we can test largest data. @@ -166,8 +163,7 @@ def recombine(self): class CombineRegionsCreateCube(MixinCombineRegions): - """ - Time+memory costs of creating a combined-regions cube. + """Time+memory costs of creating a combined-regions cube. The result is lazy, and we don't do the actual calculation. 
@@ -187,9 +183,7 @@ def track_addedmem_create_combined_cube(self, n_cubesphere): class CombineRegionsComputeRealData(MixinCombineRegions): - """ - Time+memory costs of computing combined-regions data. - """ + """Time+memory costs of computing combined-regions data.""" def time_compute_data(self, n_cubesphere): _ = self.recombined_cube.data @@ -202,8 +196,7 @@ def track_addedmem_compute_data(self, n_cubesphere): class CombineRegionsSaveData(MixinCombineRegions): - """ - Test saving *only*, having replaced the input cube data with 'imaginary' + """Test saving *only*, having replaced the input cube data with 'imaginary' array data, so that input data is not loaded from disk during the save operation. @@ -228,8 +221,7 @@ def track_filesize_saved(self, n_cubesphere): class CombineRegionsFileStreamedCalc(MixinCombineRegions): - """ - Test the whole cost of file-to-file streaming. + """Test the whole cost of file-to-file streaming. Uses the combined cube which is based on lazy data loading from the region cubes on disk. """ diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py index db4c6c0ca0..4d80429889 100644 --- a/benchmarks/benchmarks/generate_data/__init__.py +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Scripts for generating supporting data for benchmarking. +"""Scripts for generating supporting data for benchmarking. Data generated using Iris should use :func:`run_function_elsewhere`, which means that data is generated using a fixed version of Iris and a fixed @@ -60,8 +59,7 @@ def run_function_elsewhere(func_to_run, *args, **kwargs): - """ - Run a given function using the :const:`DATA_GEN_PYTHON` executable. + """Run a given function using the :const:`DATA_GEN_PYTHON` executable. This structure allows the function to be written natively. 
@@ -101,8 +99,7 @@ def run_function_elsewhere(func_to_run, *args, **kwargs): @contextmanager def load_realised(): - """ - Force NetCDF loading with realised arrays. + """Force NetCDF loading with realised arrays. Since passing between data generation and benchmarking environments is via file loading, but some benchmarks are only meaningful if starting with real diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index 9b824efd17..058eac01b1 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Wrappers for using :mod:`iris.tests.stock` methods for benchmarking. +"""Wrappers for using :mod:`iris.tests.stock` methods for benchmarking. See :mod:`benchmarks.generate_data` for an explanation of this structure. """ @@ -50,8 +49,7 @@ def _external(func_name_, temp_file_dir, **kwargs_): def create_file__xios_2d_face_half_levels( temp_file_dir, dataset_name, n_faces=866, n_times=1 ): - """ - Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. + """Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. Have taken control of temp_file_dir @@ -70,8 +68,7 @@ def create_file__xios_2d_face_half_levels( def create_file__xios_3d_face_half_levels( temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38 ): - """ - Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. + """Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. Have taken control of temp_file_dir @@ -117,8 +114,7 @@ def _external(*args, **kwargs): def sample_meshcoord(sample_mesh_kwargs=None, location="face", axis="x"): - """ - Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. 
+ """Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. Parameters deviate from the original as cannot pass a :class:`iris.experimental.ugrid.Mesh to the separate Python instance - must diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py index 59114b1846..8cca53c907 100644 --- a/benchmarks/benchmarks/generate_data/ugrid.py +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Scripts for generating supporting data for UGRID-related benchmarking. +"""Scripts for generating supporting data for UGRID-related benchmarking. """ from iris import load_cube as iris_loadcube from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD @@ -16,8 +15,7 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: str): - """ - Construct and save to file an LFRIc cubesphere-like cube for a given + """Construct and save to file an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent size. @@ -57,8 +55,7 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): - """ - Generate an LFRIc cubesphere-like cube for a given cubesphere size, + """Generate an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent size. All the cube data, coords and mesh content are LAZY, and produced without @@ -100,8 +97,7 @@ def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): def make_cube_like_umfield(xy_dims): - """ - Create a "UM-like" cube with lazy content, for save performance testing. + """Create a "UM-like" cube with lazy content, for save performance testing. 
Roughly equivalent to a single current UM cube, to be compared with a "make_cube_like_2d_cubesphere(n_cube=_N_CUBESPHERE_UM_EQUIVALENT)" @@ -160,8 +156,7 @@ def _external(xy_dims_, save_path_): def make_cubesphere_testfile(c_size, n_levels=0, n_times=1): - """ - Build a C cubesphere testfile in a given directory, with a standard naming. + """Build a C cubesphere testfile in a given directory, with a standard naming. If n_levels > 0 specified: 3d file with the specified number of levels. Return the file path. diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py index 110260de42..40bf83e79c 100644 --- a/benchmarks/benchmarks/generate_data/um_files.py +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Generate FF, PP and NetCDF files based on a minimal synthetic FF file. +"""Generate FF, PP and NetCDF files based on a minimal synthetic FF file. NOTE: uses the Mule package, so depends on an environment with Mule installed. """ @@ -12,8 +11,7 @@ def _create_um_files( len_x: int, len_y: int, len_z: int, len_t: int, compress, save_paths: dict ) -> None: - """ - Generate an FF object of given shape and compression, save to FF/PP/NetCDF. + """Generate an FF object of given shape and compression, save to FF/PP/NetCDF. This is run externally (:func:`benchmarks.generate_data.run_function_elsewhere`), so all imports @@ -46,8 +44,7 @@ def _create_um_files( array_provider = ArrayDataProvider(data_array) def add_field(level_: int, time_step_: int) -> None: - """ - Add a minimal field to the new :class:`~mule.FieldsFile`. + """Add a minimal field to the new :class:`~mule.FieldsFile`. 
Includes the minimum information to allow Mule saving and Iris loading, as well as incrementation for vertical levels and time @@ -158,8 +155,7 @@ def create_um_files( compress: bool, file_types: list, ) -> dict: - """ - Generate FF-based FF / PP / NetCDF files with specified shape and compression. + """Generate FF-based FF / PP / NetCDF files with specified shape and compression. All files representing a given shape are saved in a dedicated directory. A dictionary of the saved paths is returned. diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 5f902fd2e0..37c98bee09 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -28,8 +28,7 @@ class Iris: @staticmethod def _import(module_name, reset_colormaps=False): - """ - Have experimented with adding sleep() commands into the imported + """Have experimented with adding sleep() commands into the imported modules. The results reveal: ASV avoids invoking `import x` if nothing gets called in the diff --git a/benchmarks/benchmarks/iterate.py b/benchmarks/benchmarks/iterate.py index 6cc935498c..800911f21a 100644 --- a/benchmarks/benchmarks/iterate.py +++ b/benchmarks/benchmarks/iterate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Iterate benchmark tests. +"""Iterate benchmark tests. """ import numpy as np diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index 3f4b9b222b..8a7aa182d3 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -File loading benchmark tests. +"""File loading benchmark tests. 
Where applicable benchmarks should be parameterised for two sizes of input data: * minimal: enables detection of regressions in parts of the run-time that do @@ -143,8 +142,7 @@ def time_many_var_load(self) -> None: class StructuredFF: - """ - Test structured loading of a large-ish fieldsfile. + """Test structured loading of a large-ish fieldsfile. Structured load of the larger size should show benefit over standard load, avoiding the cost of merging. diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py index cfbe55f2ad..626b746412 100644 --- a/benchmarks/benchmarks/load/ugrid.py +++ b/benchmarks/benchmarks/load/ugrid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Mesh data loading benchmark tests. +"""Mesh data loading benchmark tests. Where possible benchmarks should be parameterised for two sizes of input data: * minimal: enables detection of regressions in parts of the run-time that do diff --git a/benchmarks/benchmarks/metadata_manager_factory.py b/benchmarks/benchmarks/metadata_manager_factory.py index 531af58b66..8e4de9949b 100644 --- a/benchmarks/benchmarks/metadata_manager_factory.py +++ b/benchmarks/benchmarks/metadata_manager_factory.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -metadata_manager_factory benchmark tests. +"""metadata_manager_factory benchmark tests. """ diff --git a/benchmarks/benchmarks/mixin.py b/benchmarks/benchmarks/mixin.py index 335bee1a0f..de5127253f 100644 --- a/benchmarks/benchmarks/mixin.py +++ b/benchmarks/benchmarks/mixin.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Mixin benchmark tests. +"""Mixin benchmark tests. 
""" diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 73a2a51990..7942361295 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Plot benchmark tests. +"""Plot benchmark tests. """ import matplotlib diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index a14d7c2668..1f8f2e3740 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Regridding benchmark test +"""Regridding benchmark test """ diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py index 6feb446c70..0c5f79947d 100644 --- a/benchmarks/benchmarks/save.py +++ b/benchmarks/benchmarks/save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -File saving benchmarks. +"""File saving benchmarks. Where possible benchmarks should be parameterised for two sizes of input data: * minimal: enables detection of regressions in parts of the run-time that do diff --git a/benchmarks/benchmarks/sperf/__init__.py b/benchmarks/benchmarks/sperf/__init__.py index 111cd4b841..0a87dbb25c 100644 --- a/benchmarks/benchmarks/sperf/__init__.py +++ b/benchmarks/benchmarks/sperf/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +"""Benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. 
SPerf = assessing performance against a series of increasingly large LFRic datasets. diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index 1c37275079..1012ccd932 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +"""Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ import os.path @@ -86,8 +85,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """ - The combine-tests "standard" setup operation. + """The combine-tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. @@ -138,8 +136,7 @@ def teardown(self, _): self.temp_save_path.unlink(missing_ok=True) def fix_dask_settings(self): - """ - Fix "standard" dask behaviour for time+space testing. + """Fix "standard" dask behaviour for time+space testing. Currently this is single-threaded mode, with known chunksize, which is optimised for space saving so we can test largest data. @@ -169,8 +166,7 @@ def save_recombined_cube(self): @on_demand_benchmark class CreateCube(Mixin): - """ - Time+memory costs of creating a combined-regions cube. + """Time+memory costs of creating a combined-regions cube. The result is lazy, and we don't do the actual calculation. @@ -191,9 +187,7 @@ def track_addedmem_create_combined_cube(self, n_cubesphere): @on_demand_benchmark class ComputeRealData(Mixin): - """ - Time+memory costs of computing combined-regions data. 
- """ + """Time+memory costs of computing combined-regions data.""" def time_compute_data(self, n_cubesphere): _ = self.recombined_cube.data @@ -205,8 +199,7 @@ def track_addedmem_compute_data(self, n_cubesphere): @on_demand_benchmark class SaveData(Mixin): - """ - Test saving *only*, having replaced the input cube data with 'imaginary' + """Test saving *only*, having replaced the input cube data with 'imaginary' array data, so that input data is not loaded from disk during the save operation. @@ -227,8 +220,7 @@ def track_filesize_saved(self, n_cubesphere): @on_demand_benchmark class FileStreamedCalc(Mixin): - """ - Test the whole cost of file-to-file streaming. + """Test the whole cost of file-to-file streaming. Uses the combined cube which is based on lazy data loading from the region cubes on disk. """ diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index f6a3f547fa..827490b082 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +"""Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ from .. import on_demand_benchmark from . import FileMixin @@ -11,8 +10,7 @@ @on_demand_benchmark class CubeEquality(FileMixin): - """ - Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s + """Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. 
Uses :class:`FileMixin` as the realistic case will be comparing diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py index 54c8b3eddb..d3e9ea7ac9 100644 --- a/benchmarks/benchmarks/sperf/load.py +++ b/benchmarks/benchmarks/sperf/load.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +"""File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ from .. import on_demand_benchmark from . import FileMixin diff --git a/benchmarks/benchmarks/sperf/save.py b/benchmarks/benchmarks/sperf/save.py index 9892f0d239..2999e81227 100644 --- a/benchmarks/benchmarks/sperf/save.py +++ b/benchmarks/benchmarks/sperf/save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +"""File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ import os.path @@ -16,10 +15,7 @@ @on_demand_benchmark class NetcdfSave: - """ - Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. - - """ + """Benchmark time and memory costs of saving ~large-ish data cubes to netcdf.""" params = [[1, 100, 200, 300, 500, 1000, 1668], [False, True]] param_names = ["cubesphere_C", "is_unstructured"] diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py index 0c99bf77c1..79466d3804 100644 --- a/benchmarks/benchmarks/trajectory.py +++ b/benchmarks/benchmarks/trajectory.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Trajectory benchmark test +"""Trajectory benchmark test """ diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 3f1d74b552..8b45031fca 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Argparse conveniences for executing common types of benchmark runs. +"""Argparse conveniences for executing common types of benchmark runs. """ from abc import ABC, abstractmethod @@ -68,9 +67,7 @@ def _check_requirements(package: str) -> None: def _prep_data_gen_env() -> None: - """ - Create/access a separate, unchanging environment for generating test data. - """ + """Create/access a separate, unchanging environment for generating test data.""" python_version = "3.11" data_gen_var = "DATA_GEN_PYTHON" @@ -133,9 +130,7 @@ def _setup_common() -> None: def _asv_compare(*commits: str, overnight_mode: bool = False) -> None: - """ - Run through a list of commits comparing each one to the next. - """ + """Run through a list of commits comparing each one to the next.""" commits = [commit[:8] for commit in commits] for i in range(len(commits) - 1): before = commits[i] @@ -154,8 +149,7 @@ def _asv_compare(*commits: str, overnight_mode: bool = False) -> None: def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str) -> None: - """ - If running under GitHub Actions: record the results in report(s). + """If running under GitHub Actions: record the results in report(s). Posting the reports is done by :func:`_gh_post_reports`, which must be run within a separate action to comply with GHA's security limitations. @@ -287,8 +281,7 @@ def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str) def _gh_post_reports() -> None: - """ - If running under GitHub Actions: post pre-prepared benchmark reports. 
+ """If running under GitHub Actions: post pre-prepared benchmark reports. Reports are prepared by :func:`_gh_create_reports`, which must be run within a separate action to comply with GHA's security limitations. @@ -341,8 +334,7 @@ def add_asv_arguments(self) -> None: @staticmethod @abstractmethod def func(args: argparse.Namespace): - """ - The function to return when the subparser is parsed. + """The function to return when the subparser is parsed. `func` is then called, performing the user's selected sub-command. diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 507519808f..372369d450 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -1,5 +1,4 @@ -""" -Applying a Filter to a Time-Series +"""Applying a Filter to a Time-Series ================================== This example demonstrates low pass filtering a time-series by applying a diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index 02b60f957d..af15868387 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -1,5 +1,4 @@ -""" -Colouring Anomaly Data With Logarithmic Scaling +"""Colouring Anomaly Data With Logarithmic Scaling =============================================== In this example, we need to plot anomaly data where the values have a diff --git a/docs/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py index 45df403104..37be139dd3 100644 --- a/docs/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -1,5 +1,4 @@ -""" -Deriving the Coriolis Frequency Over the Globe +"""Deriving the Coriolis Frequency Over the Globe ============================================== This code computes the Coriolis frequency and stores it in a cube with diff --git 
a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index 42529e0885..f4fc0a2ecc 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -1,5 +1,4 @@ -""" -Cross Section Plots +"""Cross Section Plots =================== This example demonstrates contour plots of a cross-sectioned multi-dimensional diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index 6ef6075fb3..e42144d777 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -1,5 +1,4 @@ -""" -Calculating a Custom Statistic +"""Calculating a Custom Statistic ============================== This example shows how to define and use a custom @@ -28,8 +27,7 @@ # Note: in order to meet the requirements of iris.analysis.Aggregator, it must # do the calculation over an arbitrary (given) data axis. def count_spells(data, threshold, axis, spell_length): - """ - Function to calculate the number of points in a sequence where the value + """Function to calculate the number of points in a sequence where the value has exceeded a threshold value for at least a certain number of timepoints. 
Generalised to operate on multiple time sequences arranged on a specific diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 8040ea81f5..d76ea877b2 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -1,5 +1,4 @@ -""" -Loading a Cube From a Custom File Format +"""Loading a Cube From a Custom File Format ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ This example shows how a custom text file can be loaded using the standard Iris @@ -90,8 +89,7 @@ def load_NAME_III(filename): - """ - Loads the Met Office's NAME III grid output files returning headers, column + """Loads the Met Office's NAME III grid output files returning headers, column definitions and data arrays as 3 separate lists. """ @@ -180,9 +178,7 @@ def load_NAME_III(filename): def NAME_to_cube(filenames, callback): - """ - Returns a generator of cubes given a list of filenames and a callback. - """ + """Returns a generator of cubes given a list of filenames and a callback.""" for filename in filenames: header, column_headings, data_arrays = load_NAME_III(filename) diff --git a/docs/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py index 8d2bdee174..dbf05d773d 100644 --- a/docs/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -1,5 +1,4 @@ -""" -Quickplot of a 2D Cube on a Map +"""Quickplot of a 2D Cube on a Map =============================== This example demonstrates a contour plot of global air temperature. 
The plot diff --git a/docs/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py index 004d01bc6f..b173fb2044 100644 --- a/docs/gallery_code/general/plot_inset.py +++ b/docs/gallery_code/general/plot_inset.py @@ -1,5 +1,4 @@ -""" -Test Data Showing Inset Plots +"""Test Data Showing Inset Plots ============================= This example demonstrates the use of a single 3D data cube with time, latitude diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index aad7906acd..81b89c8d55 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -1,5 +1,4 @@ -""" -Multi-Line Temperature Profile Plot +"""Multi-Line Temperature Profile Plot ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ """ diff --git a/docs/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py index 71c0f3b00e..27201018d4 100644 --- a/docs/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -1,5 +1,4 @@ -""" -Example of a Polar Stereographic Plot +"""Example of a Polar Stereographic Plot ===================================== Demonstrates plotting data that are defined on a polar stereographic diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 0c113a2e6c..10e844af1a 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -1,5 +1,4 @@ -""" -Fitting a Polynomial +"""Fitting a Polynomial ==================== This example demonstrates computing a polynomial fit to 1D data from an Iris diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 76c7206748..7ddee9531c 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ 
b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -1,5 +1,4 @@ -""" -Plotting in Different Projections +"""Plotting in Different Projections ================================= This example shows how to overlay data and graphics in different projections, diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index 30975a4828..0233ade6a7 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -1,5 +1,4 @@ -""" -Rotated Pole Mapping +"""Rotated Pole Mapping ===================== This example uses several visualisation methods to achieve an array of diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py index 195f8b4bb0..3f6e043547 100644 --- a/docs/gallery_code/general/plot_zonal_means.py +++ b/docs/gallery_code/general/plot_zonal_means.py @@ -1,5 +1,4 @@ -""" -Zonal Mean Diagram of Air Temperature +"""Zonal Mean Diagram of Air Temperature ===================================== This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. 
""" diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 516b54dbff..6cdbd50114 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -1,5 +1,4 @@ -""" -Global Average Annual Temperature Plot +"""Global Average Annual Temperature Plot ====================================== Produces a time-series plot of North American temperature forecasts for 2 diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 076b70a3ad..108407c09e 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -1,5 +1,4 @@ -""" -Global Average Annual Temperature Maps +"""Global Average Annual Temperature Maps ====================================== Produces maps of global temperature forecasts from the A1B and E1 scenarios. @@ -33,8 +32,7 @@ def cop_metadata_callback(cube, field, filename): - """ - A function which adds an "Experiment" coordinate which comes from the + """A function which adds an "Experiment" coordinate which comes from the filename. 
""" diff --git a/docs/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py index 71a743a161..50619ca870 100644 --- a/docs/gallery_code/meteorology/plot_TEC.py +++ b/docs/gallery_code/meteorology/plot_TEC.py @@ -1,5 +1,4 @@ -""" -Ionosphere Space Weather +"""Ionosphere Space Weather ======================== This space weather example plots a filled contour of rotated pole point diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index e457afc383..bd9775e657 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -1,5 +1,4 @@ -""" -Deriving Exner Pressure and Air Temperature +"""Deriving Exner Pressure and Air Temperature =========================================== This example shows some processing of cubes in order to derive further related @@ -21,8 +20,7 @@ def limit_colorbar_ticks(contour_object): - """ - Takes a contour object which has an associated colorbar and limits the + """Takes a contour object which has an associated colorbar and limits the number of ticks on the colorbar to 4. 
""" diff --git a/docs/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py index e9f8207a94..2e76be98d6 100644 --- a/docs/gallery_code/meteorology/plot_hovmoller.py +++ b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -1,5 +1,4 @@ -""" -Hovmoller Diagram of Monthly Surface Temperature +"""Hovmoller Diagram of Monthly Surface Temperature ================================================ This example demonstrates the creation of a Hovmoller diagram with fine control diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index a84a348699..32798c124f 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -1,5 +1,4 @@ -""" -Seasonal Ensemble Model Plots +"""Seasonal Ensemble Model Plots ============================= This example demonstrates the loading of a lagged ensemble dataset from the @@ -27,8 +26,7 @@ def realization_metadata(cube, field, fname): - """ - A function which modifies the cube's metadata to add a "realization" + """A function which modifies the cube's metadata to add a "realization" (ensemble member) coordinate from the filename if one doesn't already exist in the cube. 
diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index 7945a7f896..4f776144e6 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -1,5 +1,4 @@ -""" -Plotting Wind Direction Using Barbs +"""Plotting Wind Direction Using Barbs =================================== This example demonstrates using barbs to plot wind speed contours and wind diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index e00f9af654..a1820e980e 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -1,5 +1,4 @@ -""" -Plotting Wind Direction Using Quiver +"""Plotting Wind Direction Using Quiver ==================================== This example demonstrates using quiver to plot wind speed contours and wind diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index 6604b61ec3..3886c61aff 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -1,5 +1,4 @@ -""" -Oceanographic Profiles and T-S Diagrams +"""Oceanographic Profiles and T-S Diagrams ======================================= This example demonstrates how to plot vertical profiles of different diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index a6c0ce0de6..52fbb33484 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -1,5 +1,4 @@ -""" -Load a Time Series of Data From the NEMO Model +"""Load a Time Series of Data From the NEMO Model ============================================== This example demonstrates how to load multiple files containing data output by diff --git 
a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index e4bc073a46..d9968de0ae 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -1,5 +1,4 @@ -""" -Tri-Polar Grid Projected Plotting +"""Tri-Polar Grid Projected Plotting ================================= This example demonstrates cell plots of data on the semi-structured ORCA2 model diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index d3ca8309f8..b1b83b7f42 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -18,8 +18,7 @@ @pytest.fixture def image_setup_teardown(): - """ - Setup and teardown fixture. + """Setup and teardown fixture. Ensures all figures are closed before and after test to prevent one test polluting another if it fails with a figure unclosed. @@ -32,8 +31,7 @@ def image_setup_teardown(): @pytest.fixture def import_patches(monkeypatch): - """ - Replace plt.show() with a function that does nothing, also add all the + """Replace plt.show() with a function that does nothing, also add all the gallery examples to sys.path. """ @@ -52,8 +50,7 @@ def no_show(): @pytest.fixture def iris_future_defaults(): - """ - Create a fixture which resets all the iris.FUTURE settings to the defaults, + """Create a fixture which resets all the iris.FUTURE settings to the defaults, as otherwise changes made in one test can affect subsequent ones. """ diff --git a/docs/src/developers_guide/documenting/docstrings_attribute.py b/docs/src/developers_guide/documenting/docstrings_attribute.py index 81c408c7d9..18bb0c9c6f 100644 --- a/docs/src/developers_guide/documenting/docstrings_attribute.py +++ b/docs/src/developers_guide/documenting/docstrings_attribute.py @@ -1,12 +1,8 @@ class ExampleClass: - """ - Class Summary - - """ + """Class Summary""" def __init__(self, arg1, arg2): - """ - Purpose section description. 
+ """Purpose section description. Description section text. @@ -28,8 +24,7 @@ def __init__(self, arg1, arg2): @property def square(self): - """ - *(read-only)* Purpose section description. + """*(read-only)* Purpose section description. Returns: int. diff --git a/docs/src/developers_guide/documenting/docstrings_sample_routine.py b/docs/src/developers_guide/documenting/docstrings_sample_routine.py index a2c08005d7..65a312c027 100644 --- a/docs/src/developers_guide/documenting/docstrings_sample_routine.py +++ b/docs/src/developers_guide/documenting/docstrings_sample_routine.py @@ -1,6 +1,5 @@ def sample_routine(arg1, arg2, kwarg1="foo", kwarg2=None): - """ - Purpose section text goes here. + """Purpose section text goes here. Description section longer text goes here. diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index e704b93de2..c71cae433a 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:449: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:800: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:772: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... 
warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:449: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) :: @@ -128,7 +128,9 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=449) ... my_operation() ... - iris/coord_systems.py:800: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + iris/coord_systems.py:772: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: @@ -188,7 +190,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:449: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) ---- diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 4ce5c8ee45..049222953f 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Automatic concatenation of multiple cubes over one or more existing dimensions. +"""Automatic concatenation of multiple cubes over one or more existing dimensions. """ @@ -37,8 +36,7 @@ class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): - """ - Container for a coordinate and the associated data dimension(s) + """Container for a coordinate and the associated data dimension(s) spanned over a :class:`iris.cube.Cube`. Args: @@ -61,8 +59,7 @@ class _CoordMetaData( ["defn", "dims", "points_dtype", "bounds_dtype", "kwargs"], ) ): - """ - Container for the metadata that defines a dimension or auxiliary + """Container for the metadata that defines a dimension or auxiliary coordinate. Args: @@ -86,8 +83,7 @@ class _CoordMetaData( """ def __new__(mcs, coord, dims): - """ - Create a new :class:`_CoordMetaData` instance. + """Create a new :class:`_CoordMetaData` instance. Args: @@ -164,8 +160,7 @@ def name(self): class _DerivedCoordAndDims( namedtuple("DerivedCoordAndDims", ["coord", "dims", "aux_factory"]) ): - """ - Container for a derived coordinate, the associated AuxCoordFactory, and the + """Container for a derived coordinate, the associated AuxCoordFactory, and the associated data dimension(s) spanned over a :class:`iris.cube.Cube`. Args: @@ -195,8 +190,7 @@ def __eq__(self, other): class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])): - """ - Container for the metadata that defines a cell measure or ancillary + """Container for the metadata that defines a cell measure or ancillary variable. 
Args: @@ -211,8 +205,7 @@ class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])): """ def __new__(cls, ancil, dims): - """ - Create a new :class:`_OtherMetaData` instance. + """Create a new :class:`_OtherMetaData` instance. Args: @@ -254,8 +247,7 @@ def name(self): class _SkeletonCube(namedtuple("SkeletonCube", ["signature", "data"])): - """ - Basis of a source-cube, containing the associated coordinate metadata, + """Basis of a source-cube, containing the associated coordinate metadata, coordinates and cube data payload. Args: @@ -272,8 +264,7 @@ class _SkeletonCube(namedtuple("SkeletonCube", ["signature", "data"])): class _Extent(namedtuple("Extent", ["min", "max"])): - """ - Container representing the limits of a one-dimensional extent/range. + """Container representing the limits of a one-dimensional extent/range. Args: @@ -289,8 +280,7 @@ class _Extent(namedtuple("Extent", ["min", "max"])): class _CoordExtent(namedtuple("CoordExtent", ["points", "bounds"])): - """ - Container representing the points and bounds extent of a one dimensional + """Container representing the points and bounds extent of a one dimensional coordinate. Args: @@ -316,8 +306,7 @@ def concatenate( check_ancils=True, check_derived_coords=True, ): - """ - Concatenate the provided cubes over common existing dimensions. + """Concatenate the provided cubes over common existing dimensions. Args: @@ -411,15 +400,13 @@ def _none_sort(item): class _CubeSignature: - """ - Template for identifying a specific type of :class:`iris.cube.Cube` based + """Template for identifying a specific type of :class:`iris.cube.Cube` based on its metadata, coordinates and cell_measures. """ def __init__(self, cube): - """ - Represents the cube metadata and associated coordinate metadata that + """Represents the cube metadata and associated coordinate metadata that allows suitable cubes for concatenation to be identified. 
Args: @@ -511,8 +498,7 @@ def name_key_func(factory): self.derived_coords_and_dims.append(coord_and_dims) def _coordinate_differences(self, other, attr, reason="metadata"): - """ - Determine the names of the coordinates that differ between `self` and + """Determine the names of the coordinates that differ between `self` and `other` for a coordinate attribute on a _CubeSignature. Args: @@ -560,8 +546,7 @@ def _coordinate_differences(self, other, attr, reason="metadata"): return result def match(self, other, error_on_mismatch): - """ - Return whether this _CubeSignature equals another. + """Return whether this _CubeSignature equals another. This is the first step to determine if two "cubes" (either a real Cube or a ProtoCube) can be concatenated, by considering: @@ -639,15 +624,13 @@ def match(self, other, error_on_mismatch): class _CoordSignature: - """ - Template for identifying a specific type of :class:`iris.cube.Cube` based + """Template for identifying a specific type of :class:`iris.cube.Cube` based on its coordinates. """ def __init__(self, cube_signature): - """ - Represents the coordinate metadata required to identify suitable + """Represents the coordinate metadata required to identify suitable non-overlapping :class:`iris.cube.Cube` source-cubes for concatenation over a common single dimension. @@ -673,8 +656,7 @@ def __init__(self, cube_signature): @staticmethod def _cmp(coord, other): - """ - Compare the coordinates for concatenation compatibility. + """Compare the coordinates for concatenation compatibility. Returns: A boolean tuple pair of whether the coordinates are compatible, @@ -698,8 +680,7 @@ def _cmp(coord, other): return result, candidate_axis def candidate_axis(self, other): - """ - Determine the candidate axis of concatenation with the + """Determine the candidate axis of concatenation with the given coordinate signature. 
If a candidate axis is found, then the coordinate @@ -737,10 +718,7 @@ def candidate_axis(self, other): return result def _calculate_extents(self): - """ - Calculate the extent over each dimension coordinates points and bounds. - - """ + """Calculate the extent over each dimension coordinates points and bounds.""" self.dim_extents = [] for coord, order in zip(self.dim_coords, self.dim_order): if order == _CONSTANT or order == _INCREASING: @@ -767,15 +745,13 @@ def _calculate_extents(self): class _ProtoCube: - """ - Framework for concatenating multiple source-cubes over one + """Framework for concatenating multiple source-cubes over one common dimension. """ def __init__(self, cube): - """ - Create a new _ProtoCube from the given cube and record the cube + """Create a new _ProtoCube from the given cube and record the cube as a source-cube. Args: @@ -809,8 +785,7 @@ def axis(self): return self._axis def concatenate(self): - """ - Concatenates all the source-cubes registered with the + """Concatenates all the source-cubes registered with the :class:`_ProtoCube` over the nominated common dimension. Returns: @@ -885,8 +860,7 @@ def register( check_ancils=False, check_derived_coords=False, ): - """ - Determine whether the given source-cube is suitable for concatenation + """Determine whether the given source-cube is suitable for concatenation with this :class:`_ProtoCube`. Args: @@ -1044,8 +1018,7 @@ def register( return match def _add_skeleton(self, coord_signature, data): - """ - Create and add the source-cube skeleton to the + """Create and add the source-cube skeleton to the :class:`_ProtoCube`. Args: @@ -1063,8 +1036,7 @@ def _add_skeleton(self, coord_signature, data): self._skeletons.append(skeleton) def _build_aux_coordinates(self): - """ - Generate the auxiliary coordinates with associated dimension(s) + """Generate the auxiliary coordinates with associated dimension(s) mapping for the new concatenated cube. 
Returns: @@ -1121,8 +1093,7 @@ def _build_aux_coordinates(self): return aux_coords_and_dims def _build_scalar_coordinates(self): - """ - Generate the scalar coordinates for the new concatenated cube. + """Generate the scalar coordinates for the new concatenated cube. Returns: A list of scalar coordinates. @@ -1135,8 +1106,7 @@ def _build_scalar_coordinates(self): return scalar_coords def _build_cell_measures(self): - """ - Generate the cell measures with associated dimension(s) + """Generate the cell measures with associated dimension(s) mapping for the new concatenated cube. Returns: @@ -1173,8 +1143,7 @@ def _build_cell_measures(self): return cell_measures_and_dims def _build_ancillary_variables(self): - """ - Generate the ancillary variables with associated dimension(s) + """Generate the ancillary variables with associated dimension(s) mapping for the new concatenated cube. Returns: @@ -1213,8 +1182,7 @@ def _build_ancillary_variables(self): def _build_aux_factories( self, dim_coords_and_dims, aux_coords_and_dims, scalar_coords ): - """ - Generate the aux factories for the new concatenated cube. + """Generate the aux factories for the new concatenated cube. Args: @@ -1283,8 +1251,7 @@ def _build_aux_factories( return aux_factories def _build_data(self): - """ - Generate the data payload for the new concatenated cube. + """Generate the data payload for the new concatenated cube. Returns: The concatenated :class:`iris.cube.Cube` data payload. @@ -1298,8 +1265,7 @@ def _build_data(self): return data def _build_dim_coordinates(self): - """ - Generate the dimension coordinates with associated dimension + """Generate the dimension coordinates with associated dimension mapping for the new concatenated cube. 
Return: @@ -1347,8 +1313,7 @@ def _build_dim_coordinates(self): return dim_coords_and_dims def _sequence(self, extent, axis): - """ - Determine whether the given extent can be sequenced along with + """Determine whether the given extent can be sequenced along with all the extents of the source-cubes already registered with this :class:`_ProtoCube` into non-overlapping segments for the given axis. diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py index ea62ff5fb9..601066630c 100644 --- a/lib/iris/_data_manager.py +++ b/lib/iris/_data_manager.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Management of common state and behaviour for cube and coordinate data. +"""Management of common state and behaviour for cube and coordinate data. """ @@ -16,14 +15,10 @@ class DataManager: - """ - Provides a well defined API for management of real or lazy data. - - """ + """Provides a well defined API for management of real or lazy data.""" def __init__(self, data): - """ - Create a data manager for the specified data. + """Create a data manager for the specified data. Args: @@ -44,8 +39,7 @@ def __init__(self, data): self._assert_axioms() def __copy__(self): - """ - Forbid :class:`~iris._data_manager.DataManager` instance + """Forbid :class:`~iris._data_manager.DataManager` instance shallow-copy support. """ @@ -57,8 +51,7 @@ def __copy__(self): raise copy.Error(emsg.format(name, name)) def __deepcopy__(self, memo): - """ - Allow :class:`~iris._data_manager.DataManager` instance + """Allow :class:`~iris._data_manager.DataManager` instance deepcopy support. Args: @@ -70,8 +63,7 @@ def __deepcopy__(self, memo): return self._deepcopy(memo) def __eq__(self, other): - """ - Perform :class:`~iris._data_manager.DataManager` instance equality. + """Perform :class:`~iris._data_manager.DataManager` instance equality. 
Note that, this is explicitly not a lazy operation and will load any lazy payload to determine the equality result. @@ -103,8 +95,7 @@ def __eq__(self, other): return result def __ne__(self, other): - """ - Perform :class:`~iris._data_manager.DataManager` instance inequality. + """Perform :class:`~iris._data_manager.DataManager` instance inequality. Note that, this is explicitly not a lazy operation and will load any lazy payload to determine the inequality result. @@ -126,20 +117,14 @@ def __ne__(self, other): return result def __repr__(self): - """ - Returns an string representation of the instance. - - """ + """Returns an string representation of the instance.""" fmt = "{cls}({data!r})" result = fmt.format(data=self.core_data(), cls=type(self).__name__) return result def _assert_axioms(self): - """ - Definition of the manager state, that should never be violated. - - """ + """Definition of the manager state, that should never be violated.""" # Ensure there is a valid data state. is_lazy = self._lazy_array is not None is_real = self._real_array is not None @@ -148,8 +133,7 @@ def _assert_axioms(self): assert state, emsg.format("" if is_lazy else "no ", "" if is_real else "no ") def _deepcopy(self, memo, data=None): - """ - Perform a deepcopy of the :class:`~iris._data_manager.DataManager` + """Perform a deepcopy of the :class:`~iris._data_manager.DataManager` instance. Args: @@ -190,8 +174,7 @@ def _deepcopy(self, memo, data=None): @property def data(self): - """ - Returns the real data. Any lazy data being managed will be realised. + """Returns the real data. Any lazy data being managed will be realised. Returns: :class:`~numpy.ndarray` or :class:`numpy.ma.core.MaskedArray`. @@ -222,8 +205,7 @@ def data(self): @data.setter def data(self, data): - """ - Replaces the currently managed data with the specified data, which must + """Replaces the currently managed data with the specified data, which must be of an equivalent shape. 
Note that, the only shape promotion permitted is for 0-dimensional @@ -273,31 +255,21 @@ def data(self, data): @property def dtype(self): - """ - The dtype of the realised lazy data or the dtype of the real data. - - """ + """The dtype of the realised lazy data or the dtype of the real data.""" return self.core_data().dtype @property def ndim(self): - """ - The number of dimensions covered by the data being managed. - - """ + """The number of dimensions covered by the data being managed.""" return self.core_data().ndim @property def shape(self): - """ - The shape of the data being managed. - - """ + """The shape of the data being managed.""" return self.core_data().shape def copy(self, data=None): - """ - Returns a deep copy of this :class:`~iris._data_manager.DataManager` + """Returns a deep copy of this :class:`~iris._data_manager.DataManager` instance. Kwargs: @@ -313,8 +285,7 @@ def copy(self, data=None): return self._deepcopy(memo, data=data) def core_data(self): - """ - If real data is being managed, then return the :class:`~numpy.ndarray` + """If real data is being managed, then return the :class:`~numpy.ndarray` or :class:`numpy.ma.core.MaskedArray`. Otherwise, return the lazy :class:`~dask.array.core.Array`. @@ -330,8 +301,7 @@ def core_data(self): return result def has_lazy_data(self): - """ - Determine whether lazy data is being managed. + """Determine whether lazy data is being managed. Returns: Boolean. @@ -340,8 +310,7 @@ def has_lazy_data(self): return self._lazy_array is not None def lazy_data(self): - """ - Return the lazy representation of the managed data. + """Return the lazy representation of the managed data. If only real data is being managed, then return a lazy representation of that real data. diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index ad4dc5a560..711e4081cd 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -""" -Utilities for producing runtime deprecation messages. +"""Utilities for producing runtime deprecation messages. """ @@ -11,8 +10,7 @@ class IrisDeprecation(UserWarning): - """ - An Iris deprecation warning. + """An Iris deprecation warning. Note this subclasses UserWarning for backwards compatibility with Iris' original deprecation warnings. Should subclass DeprecationWarning at the @@ -23,8 +21,7 @@ class IrisDeprecation(UserWarning): def warn_deprecated(msg, stacklevel=2): - """ - Issue an Iris deprecation warning. + """Issue an Iris deprecation warning. Calls :func:`warnings.warn', to emit the message 'msg' as a :class:`warnings.warning`, of the subclass :class:`IrisDeprecationWarning`. diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 8c2f33b175..5cfa08a7de 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Routines for lazy data handling. +"""Routines for lazy data handling. To avoid replicating implementation-dependent test and conversion code. @@ -20,10 +19,7 @@ def non_lazy(func): - """ - Turn a lazy function into a function that returns a result immediately. - - """ + """Turn a lazy function into a function that returns a result immediately.""" @wraps(func) def inner(*args, **kwargs): @@ -35,8 +31,7 @@ def inner(*args, **kwargs): def is_lazy_data(data): - """ - Return whether the argument is an Iris 'lazy' data array. + """Return whether the argument is an Iris 'lazy' data array. At present, this means simply a :class:`dask.array.Array`. We determine this by checking for a "compute" property. 
@@ -47,8 +42,7 @@ def is_lazy_data(data): def is_lazy_masked_data(data): - """ - Return True if the argument is both an Iris 'lazy' data array and the + """Return True if the argument is both an Iris 'lazy' data array and the underlying array is of masked type. Otherwise return False. """ @@ -64,8 +58,7 @@ def _optimum_chunksize_internals( dims_fixed=None, dask_array_chunksize=dask.config.get("array.chunk-size"), ): - """ - Reduce or increase an initial chunk shape to get close to a chosen ideal + """Reduce or increase an initial chunk shape to get close to a chosen ideal size, while prioritising the splitting of the earlier (outer) dimensions and keeping intact the later (inner) ones. @@ -225,8 +218,7 @@ def _optimum_chunksize( def as_lazy_data( data, chunks=None, asarray=False, dims_fixed=None, dask_chunking=False ): - """ - Convert the input array `data` to a :class:`dask.array.Array`. + """Convert the input array `data` to a :class:`dask.array.Array`. Args: @@ -300,8 +292,7 @@ def as_lazy_data( def _co_realise_lazy_arrays(arrays): - """ - Compute multiple lazy arrays and return a list of real values. + """Compute multiple lazy arrays and return a list of real values. All the arrays are computed together, so they can share results for common graph elements. @@ -334,8 +325,7 @@ def _co_realise_lazy_arrays(arrays): def as_concrete_data(data): - """ - Return the actual content of a lazy array, as a numpy array. + """Return the actual content of a lazy array, as a numpy array. If the input data is a NumPy `ndarray` or masked array, return it unchanged. @@ -357,8 +347,7 @@ def as_concrete_data(data): def multidim_lazy_stack(stack): - """ - Recursively build a multidimensional stacked dask array. + """Recursively build a multidimensional stacked dask array. This is needed because :meth:`dask.array.Array.stack` only accepts a 1-dimensional list. 
@@ -385,8 +374,7 @@ def multidim_lazy_stack(stack): def co_realise_cubes(*cubes): - """ - Fetch 'real' data for multiple cubes, in a shared calculation. + """Fetch 'real' data for multiple cubes, in a shared calculation. This computes any lazy data, equivalent to accessing each `cube.data`. However, lazy calculations and data fetches can be shared between the @@ -421,8 +409,7 @@ def co_realise_cubes(*cubes): def lazy_elementwise(lazy_array, elementwise_op): - """ - Apply a (numpy-style) elementwise array operation to a lazy array. + """Apply a (numpy-style) elementwise array operation to a lazy array. Elementwise means that it performs a independent calculation at each point of the input, producing a result array of the same shape. diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index d38c9e4982..db2b410b94 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Automatic collation of cubes into higher-dimensional cubes. +"""Automatic collation of cubes into higher-dimensional cubes. Typically the cube merge process is handled by :method:`iris.cube.CubeList.merge`. @@ -35,8 +34,7 @@ # Private namedtuple wrapper classes. # class _Template(namedtuple("Template", ["dims", "points", "bounds", "kwargs"])): - """ - Common framework from which to build a dimension or auxiliary coordinate. + """Common framework from which to build a dimension or auxiliary coordinate. Args: @@ -63,8 +61,7 @@ class _Template(namedtuple("Template", ["dims", "points", "bounds", "kwargs"])): class _CoordMetaData( namedtuple("CoordMetaData", ["points_dtype", "bounds_dtype", "kwargs"]) ): - """ - Bespoke metadata required to build a dimension or auxiliary coordinate. + """Bespoke metadata required to build a dimension or auxiliary coordinate. 
Args: @@ -85,8 +82,7 @@ class _CoordMetaData( class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): - """ - Container for a coordinate and the associated data dimension/s + """Container for a coordinate and the associated data dimension/s spanned over a :class:`iris.cube.Cube`. Args: @@ -106,8 +102,7 @@ class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): class _ScalarCoordPayload( namedtuple("ScalarCoordPayload", ["defns", "values", "metadata"]) ): - """ - Container for all scalar coordinate data and metadata represented + """Container for all scalar coordinate data and metadata represented within a :class:`iris.cube.Cube`. All scalar coordinate related data is sorted into ascending order @@ -136,8 +131,7 @@ class _ScalarCoordPayload( class _VectorCoordPayload( namedtuple("VectorCoordPayload", ["dim_coords_and_dims", "aux_coords_and_dims"]) ): - """ - Container for all vector coordinate data and metadata represented + """Container for all vector coordinate data and metadata represented within a :class:`iris.cube.Cube`. Args: @@ -160,8 +154,7 @@ class _VectorCoordPayload( class _CoordPayload(namedtuple("CoordPayload", ["scalar", "vector", "factory_defns"])): - """ - Container for all the scalar and vector coordinate data and + """Container for all the scalar and vector coordinate data and metadata, and auxiliary coordinate factories represented within a :class:`iris.cube.Cube`. @@ -219,8 +212,7 @@ def _coords_msgs(msgs, coord_group, defns_a, defns_b): ) def match_signature(self, signature, error_on_mismatch): - """ - Return whether this _CoordPayload matches the corresponding + """Return whether this _CoordPayload matches the corresponding aspects of a _CoordSignature. 
Args: @@ -292,8 +284,7 @@ class _CoordSignature( ], ) ): - """ - Criterion for identifying a specific type of :class:`iris.cube.Cube` + """Criterion for identifying a specific type of :class:`iris.cube.Cube` based on its scalar and vector coordinate data and metadata, and auxiliary coordinate factories. @@ -334,8 +325,7 @@ class _CubeSignature( ], ) ): - """ - Criterion for identifying a specific type of :class:`iris.cube.Cube` + """Criterion for identifying a specific type of :class:`iris.cube.Cube` based on its metadata. Args: @@ -416,8 +406,7 @@ def _defn_msgs(self, other_defn): return msgs def match(self, other, error_on_mismatch): - """ - Return whether this _CubeSignature equals another. + """Return whether this _CubeSignature equals another. This is the first step to determine if two "cubes" (either a real Cube or a ProtoCube) can be merged, by considering: @@ -462,8 +451,7 @@ def match(self, other, error_on_mismatch): class _Skeleton(namedtuple("Skeleton", ["scalar_values", "data"])): - """ - Basis of a source-cube, containing the associated scalar coordinate values + """Basis of a source-cube, containing the associated scalar coordinate values and data payload of a :class:`iris.cube.Cube`. Args: @@ -483,8 +471,7 @@ class _Skeleton(namedtuple("Skeleton", ["scalar_values", "data"])): class _FactoryDefn(namedtuple("_FactoryDefn", ["class_", "dependency_defns"])): - """ - The information required to identify and rebuild a single AuxCoordFactory. + """The information required to identify and rebuild a single AuxCoordFactory. Args: @@ -501,8 +488,7 @@ class _FactoryDefn(namedtuple("_FactoryDefn", ["class_", "dependency_defns"])): class _Relation(namedtuple("Relation", ["separable", "inseparable"])): - """ - Categorisation of the candidate dimensions belonging to a + """Categorisation of the candidate dimensions belonging to a :class:`ProtoCube` into separable 'independent' dimensions, and inseparable dependent dimensions. 
@@ -523,8 +509,7 @@ class _Relation(namedtuple("Relation", ["separable", "inseparable"])): def _is_combination(name): - """ - Determine whether the candidate dimension is an 'invented' combination + """Determine whether the candidate dimension is an 'invented' combination of candidate dimensions. Args: @@ -540,8 +525,7 @@ def _is_combination(name): def build_indexes(positions): - """ - Construct a mapping for each candidate dimension that maps for each + """Construct a mapping for each candidate dimension that maps for each of its scalar values the set of values for each of the other candidate dimensions. @@ -606,8 +590,7 @@ def build_indexes(positions): def _separable_pair(name, index): - """ - Determine whether the candidate dimension is separable. + """Determine whether the candidate dimension is separable. A candidate dimension X and Y are separable if each scalar value of X maps to the same set of scalar values of Y. @@ -635,8 +618,7 @@ def _separable_pair(name, index): def _separable(name, indexes): - """ - Determine the candidate dimensions that are separable and + """Determine the candidate dimensions that are separable and inseparable relative to the provided candidate dimension. A candidate dimension X and Y are separable if each scalar @@ -670,8 +652,7 @@ def _separable(name, indexes): def derive_relation_matrix(indexes): - """ - Construct a mapping for each candidate dimension that specifies + """Construct a mapping for each candidate dimension that specifies which of the other candidate dimensions are separable or inseparable. A candidate dimension X and Y are separable if each scalar value of @@ -712,8 +693,7 @@ def derive_relation_matrix(indexes): def derive_groups(relation_matrix): - """ - Determine all related (chained) groups of inseparable candidate dimensions. + """Determine all related (chained) groups of inseparable candidate dimensions. 
If candidate dimension A is inseparable for B and C, and B is inseparable from D, and E is inseparable from F. Then the groups are ABCD and EF. @@ -748,8 +728,7 @@ def derive_groups(relation_matrix): def _derive_separable_group(relation_matrix, group): - """ - Determine which candidate dimensions in the group are separable. + """Determine which candidate dimensions in the group are separable. Args: @@ -774,8 +753,7 @@ def _derive_separable_group(relation_matrix, group): def _is_dependent(dependent, independent, positions, function_mapping=None): - """ - Determine whether there exists a one-to-one functional relationship + """Determine whether there exists a one-to-one functional relationship between the independent candidate dimension/s and the dependent candidate dimension. @@ -824,8 +802,7 @@ def _is_dependent(dependent, independent, positions, function_mapping=None): def _derive_consistent_groups(relation_matrix, separable_group): - """ - Determine the largest combinations of candidate dimensions within the + """Determine the largest combinations of candidate dimensions within the separable group that are self consistently separable from one another. If the candidate dimension A is separable from the candidate dimensions @@ -873,8 +850,7 @@ def _derive_consistent_groups(relation_matrix, separable_group): def _build_separable_group( space, group, separable_consistent_groups, positions, function_matrix ): - """ - Update the space with the first separable consistent group that + """Update the space with the first separable consistent group that satisfies a valid functional relationship with all other candidate dimensions in the group. 
@@ -938,8 +914,7 @@ def _build_separable_group( def _build_inseparable_group(space, group, positions, function_matrix): - """ - Update the space with the first valid scalar functional relationship + """Update the space with the first valid scalar functional relationship between a candidate dimension within the group and all other candidate dimensions. @@ -1006,8 +981,7 @@ def _build_inseparable_group(space, group, positions, function_matrix): def _build_combination_group(space, group, positions, function_matrix): - """ - Update the space with the new combined or invented dimension + """Update the space with the new combined or invented dimension that each member of this inseparable group depends on. As no functional relationship between members of the group can be @@ -1061,8 +1035,7 @@ def _build_combination_group(space, group, positions, function_matrix): def derive_space(groups, relation_matrix, positions, function_matrix=None): - """ - Determine the relationship between all the candidate dimensions. + """Determine the relationship between all the candidate dimensions. Args: * groups: @@ -1122,15 +1095,13 @@ def derive_space(groups, relation_matrix, positions, function_matrix=None): class ProtoCube: - """ - Framework for merging source-cubes into one or more higher + """Framework for merging source-cubes into one or more higher dimensional cubes. """ def __init__(self, cube): - """ - Create a new ProtoCube from the given cube and record the cube + """Create a new ProtoCube from the given cube and record the cube as a source-cube. """ @@ -1210,8 +1181,7 @@ def _report_duplicate(self, nd_indexes, group_by_nd_index): raise iris.exceptions.DuplicateDataError(msg) def merge(self, unique=True): - """ - Returns the list of cubes resulting from merging the registered + """Returns the list of cubes resulting from merging the registered source-cubes. 
Kwargs: @@ -1309,8 +1279,7 @@ def merge(self, unique=True): return merged_cubes def register(self, cube, error_on_mismatch=False): - """ - Add a compatible :class:`iris.cube.Cube` as a source-cube for + """Add a compatible :class:`iris.cube.Cube` as a source-cube for merging under this :class:`ProtoCube`. A cube will be deemed compatible based on the signature of the @@ -1348,8 +1317,7 @@ def register(self, cube, error_on_mismatch=False): return match def _guess_axis(self, name): - """ - Returns a "best guess" axis name of the candidate dimension. + """Returns a "best guess" axis name of the candidate dimension. Heuristic categoration of the candidate dimension (i.e. scalar_defn index) into either label 'T', 'Z', 'Y', 'X' @@ -1376,8 +1344,7 @@ def _guess_axis(self, name): return axis def _define_space(self, space, positions, indexes, function_matrix): - """ - Given the derived :class:`ProtoCube` space, define this space in + """Given the derived :class:`ProtoCube` space, define this space in terms of its dimensionality, shape, coordinates and associated coordinate to space dimension mappings. @@ -1553,8 +1520,7 @@ def name_in_independents(): self._shape.extend(signature.data_shape) def _get_cube(self, data): - """ - Return a fully constructed cube for the given data, containing + """Return a fully constructed cube for the given data, containing all its coordinates and metadata. """ @@ -1594,8 +1560,7 @@ def _get_cube(self, data): return cube def _nd_index(self, position): - """ - Returns the n-dimensional index of this source-cube (position), + """Returns the n-dimensional index of this source-cube (position), within the merged cube. 
""" @@ -1617,8 +1582,7 @@ def _nd_index(self, position): return tuple(index) def _build_coordinates(self): - """ - Build the dimension and auxiliary coordinates for the final + """Build the dimension and auxiliary coordinates for the final merged cube given that the final dimensionality of the target merged cube is known and the associated dimension/s that each coordinate maps onto in that merged cube. @@ -1689,8 +1653,7 @@ def _build_coordinates(self): aux_coords_and_dims.append(_CoordAndDims(item.coord, dims)) def _build_signature(self, cube): - """ - Generate the signature that defines this cube. + """Generate the signature that defines this cube. Args: @@ -1720,8 +1683,7 @@ def _add_cube(self, cube, coord_payload): self._skeletons.append(skeleton) def _extract_coord_payload(self, cube): - """ - Extract all relevant coordinate data and metadata from the cube. + """Extract all relevant coordinate data and metadata from the cube. In particular, for each scalar coordinate determine its definition, its cell (point and bound) value and all other scalar coordinate diff --git a/lib/iris/_representation/__init__.py b/lib/iris/_representation/__init__.py index aec46ec927..cc312b5c9c 100644 --- a/lib/iris/_representation/__init__.py +++ b/lib/iris/_representation/__init__.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Code to make printouts and other representations (e.g. html) of Iris objects. +"""Code to make printouts and other representations (e.g. html) of Iris objects. """ diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 3044d072f9..81f60e595d 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -""" -Provides text printouts of Iris cubes. +"""Provides text printouts of Iris cubes. """ from copy import deepcopy @@ -12,8 +11,7 @@ class Table: - """ - A container of text strings in rows + columns, that can format its content + """A container of text strings in rows + columns, that can format its content into a string per row, with contents in columns of fixed width. Supports left- or right- aligned columns, alignment being set "per row". @@ -52,8 +50,7 @@ def __init__(self, cols, aligns, i_col_unlimited=None): # - a crude alternative to proper column spanning def add_row(self, cols, aligns, i_col_unlimited=None): - """ - Create a new row at the bottom. + """Create a new row at the bottom. Args: * cols (list of string): @@ -120,8 +117,7 @@ def __str__(self): class CubePrinter: - """ - An object created from a + """An object created from a :class:`iris._representation.CubeSummary`, which provides text printout of a :class:`iris.cube.Cube`. @@ -135,8 +131,7 @@ class CubePrinter: N_INDENT_EXTRA = 4 def __init__(self, cube_or_summary): - """ - An object that provides a printout of a cube. + """An object that provides a printout of a cube. Args: @@ -266,8 +261,7 @@ def add_scalar_row(name, value=""): @staticmethod def _decorated_table(table, name_padding=None): - """ - Return a modified table with added characters in the header. + """Return a modified table with added characters in the header. Note: 'name_padding' sets a minimum width for the name column (#0). @@ -324,8 +318,7 @@ def _multiline_summary(self, name_padding): return result def to_string(self, oneline=False, name_padding=35): - """ - Produce a printable summary. + """Produce a printable summary. 
Args: * oneline (bool): diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 58730af2b5..bae63ccb40 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides objects describing cube summaries. +"""Provides objects describing cube summaries. """ import re @@ -76,9 +75,7 @@ def array_repr(arr): def value_repr(value, quote_strings=False, clip_strings=False): - """ - Produce a single-line printable version of an attribute or scalar value. - """ + """Produce a single-line printable version of an attribute or scalar value.""" if hasattr(value, "dtype"): value = array_repr(value) elif isinstance(value, str): @@ -262,10 +259,7 @@ def __init__(self, title, cell_methods): class CubeSummary: - """ - This class provides a structure for output representations of an Iris cube. - - """ + """This class provides a structure for output representations of an Iris cube.""" def __init__(self, cube, name_padding=35): self.header = FullHeader(cube, name_padding) diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index 3916088e33..f882860d25 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -18,14 +18,10 @@ class AreaWeightedRegridder: - """ - This class provides support for performing area-weighted regridding. - - """ + """This class provides support for performing area-weighted regridding.""" def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): - """ - Create an area-weighted regridder for conversions between the source + """Create an area-weighted regridder for conversions between the source and target grids. 
Args: @@ -80,8 +76,7 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): ) = _regrid_info def __call__(self, cube): - """ - Regrid this :class:`~iris.cube.Cube` onto the target grid of + """Regrid this :class:`~iris.cube.Cube` onto the target grid of this :class:`AreaWeightedRegridder`. The given cube must be defined with the same grid as the source @@ -138,8 +133,7 @@ def __call__(self, cube): def _get_xy_coords(cube): - """ - Return the x and y coordinates from a cube. + """Return the x and y coordinates from a cube. This function will preferentially return a pair of dimension coordinates (if there are more than one potential x or y dimension @@ -221,8 +215,7 @@ def _get_xy_coords(cube): def _get_bounds_in_units(coord, units, dtype): - """ - Return a copy of coord's bounds in the specified units and dtype. + """Return a copy of coord's bounds in the specified units and dtype. Return as contiguous bounds. """ @@ -234,8 +227,7 @@ def _get_bounds_in_units(coord, units, dtype): def _regrid_area_weighted_rectilinear_src_and_grid__prepare(src_cube, grid_cube): - """ - First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. + """First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'. Check inputs and calculate related info. The 'regrid info' returned can be re-used over many 2d slices. @@ -377,8 +369,7 @@ def _calculate_regrid_area_weighted_weights( def _regrid_area_weighted_rectilinear_src_and_grid__perform( src_cube, regrid_info, mdtol ): - """ - Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'. + """Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'. Perform the prepared regrid calculation on a single 2d cube. @@ -454,8 +445,7 @@ def regrid_callback(*args, **kwargs): def _get_coord_to_coord_matrix_info(src_bounds, tgt_bounds, circular=False, mod=None): - """ - First part of weight calculation. + """First part of weight calculation. 
Calculate the weights contribution from a single pair of coordinate bounds. Search for pairs of overlapping source and @@ -554,8 +544,7 @@ def _get_coord_to_coord_matrix_info(src_bounds, tgt_bounds, circular=False, mod= def _combine_xy_weights(x_info, y_info, src_shape, tgt_shape): - """ - Second part of weight calculation. + """Second part of weight calculation. Combine the weights contributions from both pairs of coordinate bounds (i.e. the source/target pairs for the x and y coords). @@ -591,8 +580,7 @@ def _combine_xy_weights(x_info, y_info, src_shape, tgt_shape): def _standard_regrid_no_masks(data, weights, tgt_shape): - """ - Regrid unmasked data to an unmasked result. + """Regrid unmasked data to an unmasked result. Assumes that the first two dimensions are the x-y grid. """ @@ -611,8 +599,7 @@ def _standard_regrid_no_masks(data, weights, tgt_shape): def _standard_regrid(data, weights, tgt_shape, mdtol): - """ - Regrid data and handle masks. + """Regrid data and handle masks. Assumes that the first two dimensions are the x-y grid. """ diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index ae2fb433c1..6a0ba3e1a4 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Code to implement vector rotation by angles, and inferring gridcell angles +"""Code to implement vector rotation by angles, and inferring gridcell angles from coordinate points and bounds. """ @@ -15,8 +14,7 @@ def _3d_xyz_from_latlon(lon, lat): - """ - Return locations of (lon, lat) in 3D space. + """Return locations of (lon, lat) in 3D space. Args: @@ -45,8 +43,7 @@ def _3d_xyz_from_latlon(lon, lat): def _latlon_from_xyz(xyz): - """ - Return arrays of lons+lats angles from xyz locations. + """Return arrays of lons+lats angles from xyz locations. 
Args: @@ -70,8 +67,7 @@ def _latlon_from_xyz(xyz): def _angle(p, q, r): - """ - Estimate grid-angles to true-Eastward direction from positions in the same + """Estimate grid-angles to true-Eastward direction from positions in the same grid row, but at increasing column (grid-Eastward) positions. {P, Q, R} are locations of consecutive points in the same grid row. @@ -135,8 +131,7 @@ def _angle(p, q, r): def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): - """ - Calculate gridcell orientations for an arbitrary 2-dimensional grid. + """Calculate gridcell orientations for an arbitrary 2-dimensional grid. The input grid is defined by two 2-dimensional coordinate arrays with the same dimensions (ny, nx), specifying the geolocations of a 2D mesh. @@ -394,8 +389,7 @@ def transform_xy_arrays(x, y): def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwargs=None): - """ - Rotate distance vectors from grid-oriented to true-latlon-oriented. + """Rotate distance vectors from grid-oriented to true-latlon-oriented. Can also rotate by arbitrary angles, if they are passed in. diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index f0d31e4361..7c28d24efa 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -33,8 +33,7 @@ def _canonical_sample_points(coords, sample_points): - """ - Return the canonical form of the points values. + """Return the canonical form of the points values. Ensures that any points supplied as datetime objects, or similar, are converted to their numeric form. @@ -58,8 +57,7 @@ def convert_date(date): def extend_circular_coord(coord, points): - """ - Return coordinates points with a shape extended by one + """Return coordinates points with a shape extended by one This is common when dealing with circular coordinates. 
""" @@ -69,8 +67,7 @@ def extend_circular_coord(coord, points): def extend_circular_coord_and_data(coord, data, coord_dim): - """ - Return coordinate points and a data array with a shape extended by one + """Return coordinate points and a data array with a shape extended by one in the coord_dim axis. This is common when dealing with circular coordinates. @@ -90,8 +87,7 @@ def extend_circular_data(data, coord_dim): def get_xy_dim_coords(cube): - """ - Return the x and y dimension coordinates from a cube. + """Return the x and y dimension coordinates from a cube. This function raises a ValueError if the cube does not contain one and only one set of x and y dimension coordinates. It also raises a ValueError @@ -111,8 +107,7 @@ def get_xy_dim_coords(cube): def get_xy_coords(cube, dim_coords=False): - """ - Return the x and y coordinates from a cube. + """Return the x and y coordinates from a cube. This function raises a ValueError if the cube does not contain one and only one set of x and y coordinates. It also raises a ValueError @@ -159,8 +154,7 @@ def get_xy_coords(cube, dim_coords=False): def snapshot_grid(cube): - """ - Helper function that returns deep copies of lateral (dimension) coordinates + """Helper function that returns deep copies of lateral (dimension) coordinates from a cube. """ @@ -169,15 +163,13 @@ def snapshot_grid(cube): class RectilinearInterpolator: - """ - This class provides support for performing nearest-neighbour or + """This class provides support for performing nearest-neighbour or linear interpolation over one or more orthogonal dimensions. """ def __init__(self, src_cube, coords, method, extrapolation_mode): - """ - Perform interpolation over one or more orthogonal coordinates. + """Perform interpolation over one or more orthogonal coordinates. 
Args: @@ -256,8 +248,7 @@ def extrapolation_mode(self): return self._mode def _account_for_circular(self, points, data): - """ - Extend the given data array, and re-centralise coordinate points + """Extend the given data array, and re-centralise coordinate points for circular (1D) coordinates. """ @@ -287,8 +278,7 @@ def _account_for_inverted(self, data): return data def _interpolate(self, data, interp_points): - """ - Interpolate a data array over N dimensions. + """Interpolate a data array over N dimensions. Create and cache the underlying interpolator instance before invoking it to perform interpolation over the data at the given coordinate point @@ -364,10 +354,7 @@ def _interpolate(self, data, interp_points): return result def _resample_coord(self, sample_points, coord, coord_dims): - """ - Interpolate the given coordinate at the provided sample points. - - """ + """Interpolate the given coordinate at the provided sample points.""" # NB. This section is ripe for improvement: # - Internally self._points() expands coord.points to the same # N-dimensional shape as the cube's data, but it doesn't @@ -391,8 +378,7 @@ def _resample_coord(self, sample_points, coord, coord_dims): return new_coord def _setup(self): - """ - Perform initial start-up configuration and validation based on the + """Perform initial start-up configuration and validation based on the cube and the specified coordinates to be interpolated over. """ @@ -443,8 +429,7 @@ def _setup(self): self._validate() def _validate(self): - """ - Perform all sanity checks to ensure that the interpolation request + """Perform all sanity checks to ensure that the interpolation request over the cube with the specified coordinates is valid and can be performed. @@ -468,8 +453,7 @@ def _validate(self): raise ValueError(msg.format(coord.name())) def _interpolated_dtype(self, dtype): - """ - Determine the minimum base dtype required by the + """Determine the minimum base dtype required by the underlying interpolator. 
""" @@ -480,8 +464,7 @@ def _interpolated_dtype(self, dtype): return result def _points(self, sample_points, data, data_dims=None): - """ - Interpolate the given data values at the specified list of orthogonal + """Interpolate the given data values at the specified list of orthogonal (coord, points) pairs. Args: @@ -579,8 +562,7 @@ def _points(self, sample_points, data, data_dims=None): return result def __call__(self, sample_points, collapse_scalar=True): - """ - Construct a cube from the specified orthogonal interpolation points. + """Construct a cube from the specified orthogonal interpolation points. Args: diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index ad5a4557da..61b4d31204 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -24,8 +24,7 @@ def _transform_xy_arrays(crs_from, x, y, crs_to): - """ - Transform 2d points between cartopy coordinate reference systems. + """Transform 2d points between cartopy coordinate reference systems. NOTE: copied private function from iris.analysis.cartography. @@ -45,8 +44,7 @@ def _transform_xy_arrays(crs_from, x, y, crs_to): def _regrid_weighted_curvilinear_to_rectilinear__prepare(src_cube, weights, grid_cube): - """ - First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'. + """First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'. Check inputs and calculate the sparse regrid matrix and related info. The 'regrid info' returned can be re-used over many cubes. @@ -276,8 +274,7 @@ def _curvilinear_to_rectilinear_regrid_data( dims, regrid_info, ): - """ - Part of 'regrid_weighted_curvilinear_to_rectilinear' which acts on the data. + """Part of 'regrid_weighted_curvilinear_to_rectilinear' which acts on the data. Perform the prepared regrid calculation on an array. 
@@ -347,8 +344,7 @@ def _curvilinear_to_rectilinear_regrid_data( def _regrid_weighted_curvilinear_to_rectilinear__perform(src_cube, regrid_info): - """ - Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. + """Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. Perform the prepared regrid calculation on a single cube. @@ -372,15 +368,13 @@ def _regrid_weighted_curvilinear_to_rectilinear__perform(src_cube, regrid_info): class CurvilinearRegridder: - """ - This class provides support for performing point-in-cell regridding + """This class provides support for performing point-in-cell regridding between a curvilinear source grid and a rectilinear target grid. """ def __init__(self, src_grid_cube, target_grid_cube, weights=None): - """ - Create a regridder for conversions between the source + """Create a regridder for conversions between the source and target grids. Args: @@ -415,8 +409,7 @@ def __init__(self, src_grid_cube, target_grid_cube, weights=None): @staticmethod def _get_horizontal_coord(cube, axis): - """ - Gets the horizontal coordinate on the supplied cube along the + """Gets the horizontal coordinate on the supplied cube along the specified axis. Args: @@ -441,8 +434,7 @@ def _get_horizontal_coord(cube, axis): return coords[0] def __call__(self, src): - """ - Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of + """Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of this :class:`_CurvilinearRegridder`. The given cube must be defined with the same grid as the source @@ -493,15 +485,13 @@ def __call__(self, src): class RectilinearRegridder: - """ - This class provides support for performing nearest-neighbour or + """This class provides support for performing nearest-neighbour or linear regridding between source and target grids. 
""" def __init__(self, src_grid_cube, tgt_grid_cube, method, extrapolation_mode): - """ - Create a regridder for conversions between the source + """Create a regridder for conversions between the source and target grids. Args: @@ -563,8 +553,7 @@ def extrapolation_mode(self): @staticmethod def _sample_grid(src_coord_system, grid_x_coord, grid_y_coord): - """ - Convert the rectilinear grid coordinates to a curvilinear grid in + """Convert the rectilinear grid coordinates to a curvilinear grid in the source coordinate system. The `grid_x_coord` and `grid_y_coord` must share a common coordinate @@ -610,8 +599,7 @@ def _regrid( method="linear", extrapolation_mode="nanmask", ): - """ - Regrid the given data from the src grid to the sample grid. + """Regrid the given data from the src grid to the sample grid. The result will be a MaskedArray if either/both of: - the source array is a MaskedArray, @@ -857,8 +845,7 @@ def _check_units(self, coord): raise ValueError(msg) def __call__(self, src): - """ - Regrid this :class:`~iris.cube.Cube` on to the target grid of + """Regrid this :class:`~iris.cube.Cube` on to the target grid of this :class:`RectilinearRegridder`. The given cube must be defined with the same grid as the source @@ -974,8 +961,7 @@ def regrid_callback(*args, **kwargs): def _create_cube(data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback): - r""" - Return a new cube for the result of regridding. + r"""Return a new cube for the result of regridding. Returned cube represents the result of regridding the source cube onto the horizontal coordinates (e.g. latitude) of the target cube. 
All the metadata and coordinates of the result cube are copied from diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index cceb1ba7ab..1300da6d89 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -14,10 +14,7 @@ def _ndim_coords_from_arrays(points, ndim=None): - """ - Convert a tuple of coordinate arrays to a (..., ndim)-shaped array. - - """ + """Convert a tuple of coordinate arrays to a (..., ndim)-shaped array.""" if isinstance(points, tuple) and len(points) == 1: # handle argument tuple points = points[0] @@ -44,8 +41,7 @@ def _ndim_coords_from_arrays(points, ndim=None): # 9aeaafb32/scipy/interpolate/interpolate.py#L1400 class _RegularGridInterpolator: - """ - Interpolation on a regular grid in arbitrary dimensions + """Interpolation on a regular grid in arbitrary dimensions The data must be defined on a regular grid; the grid spacing however may be uneven. Linear and nearest-neighbour interpolation are supported. After @@ -143,8 +139,7 @@ def __init__( self.values = values def __call__(self, xi, method=None): - """ - Interpolation at coordinates + """Interpolation at coordinates Parameters ---------- @@ -162,8 +157,7 @@ def __call__(self, xi, method=None): return self.interp_using_pre_computed_weights(weights) def compute_interp_weights(self, xi, method=None): - """ - Prepare the interpolator for interpolation to the given sample points. + """Prepare the interpolator for interpolation to the given sample points. .. note:: This interface provides the ability to reuse weights on multiple @@ -255,8 +249,7 @@ def compute_interp_weights(self, xi, method=None): return prepared def interp_using_pre_computed_weights(self, computed_weights): - """ - Perform the interpolation using pre-computed interpolation weights. + """Perform the interpolation using pre-computed interpolation weights. .. 
note:: This interface provides the ability to reuse weights on multiple diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index bcdc1a6b21..4da4e32ad7 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Various utilities and numeric transformations relevant to cartography. +"""Various utilities and numeric transformations relevant to cartography. """ @@ -54,8 +53,7 @@ def wrap_lons(lons, base, period): - """ - Wrap longitude values into the range between base and base+period. + """Wrap longitude values into the range between base and base+period. .. testsetup:: @@ -78,8 +76,7 @@ def wrap_lons(lons, base, period): def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): - """ - Convert arrays of rotated-pole longitudes and latitudes to unrotated + """Convert arrays of rotated-pole longitudes and latitudes to unrotated arrays of longitudes and latitudes. The values of ``pole_lon`` and ``pole_lat`` should describe the location of the rotated pole that describes the arrays of rotated-pole longitudes and latitudes. @@ -122,8 +119,7 @@ def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): def rotate_pole(lons, lats, pole_lon, pole_lat): - """ - Convert arrays of longitudes and latitudes to arrays of rotated-pole + """Convert arrays of longitudes and latitudes to arrays of rotated-pole longitudes and latitudes. The values of ``pole_lon`` and ``pole_lat`` should describe the rotated pole that the arrays of longitudes and latitudes are to be rotated onto. @@ -187,8 +183,7 @@ def search_for_coord(coord_iterable, coord_name): def _xy_range(cube, mode=None): - """ - Return the x & y range of this Cube. + """Return the x & y range of this Cube. 
Args: @@ -252,8 +247,7 @@ def _xy_range(cube, mode=None): def get_xy_grids(cube): - """ - Return 2D X and Y points for a given cube. + """Return 2D X and Y points for a given cube. Args: @@ -286,8 +280,7 @@ def get_xy_grids(cube): def get_xy_contiguous_bounded_grids(cube): - """ - Return 2d arrays for x and y bounds. + """Return 2d arrays for x and y bounds. Returns array of shape (n+1, m+1). @@ -354,8 +347,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): def area_weights(cube, normalize=False): - r""" - Returns an array of area weights, with the same dimensions as the cube. + r"""Returns an array of area weights, with the same dimensions as the cube. This is a 2D lat/lon area weights array, repeated over the non lat/lon dimensions. @@ -474,8 +466,7 @@ def area_weights(cube, normalize=False): def cosine_latitude_weights(cube): - r""" - Returns an array of latitude weights, with the same dimensions as + r"""Returns an array of latitude weights, with the same dimensions as the cube. The weights are the cosine of latitude. These are n-dimensional latitude weights repeated over the dimensions @@ -561,8 +552,7 @@ def cosine_latitude_weights(cube): def project(cube, target_proj, nx=None, ny=None): - """ - Nearest neighbour regrid to a specified target projection. + """Nearest neighbour regrid to a specified target projection. Return a new cube that is the result of projecting a cube with 1 or 2 dimensional latitude-longitude coordinates from its coordinate system into @@ -859,8 +849,7 @@ def project(cube, target_proj, nx=None, ny=None): def _transform_xy(crs_from, x, y, crs_to): - """ - Shorthand function to transform 2d points between coordinate + """Shorthand function to transform 2d points between coordinate reference systems. Args: @@ -879,8 +868,7 @@ def _transform_xy(crs_from, x, y, crs_to): def _inter_crs_differentials(crs1, x, y, crs2): - """ - Calculate coordinate partial differentials from crs1 to crs2. 
+ """Calculate coordinate partial differentials from crs1 to crs2. Returns dx2/dx1, dy2/dx1, dx2/dy1 and dy2/dy1, at given locations. @@ -930,8 +918,7 @@ def _inter_crs_differentials(crs1, x, y, crs2): def _crs_distance_differentials(crs, x, y): - """ - Calculate d(distance) / d(x) and ... / d(y) for a coordinate + """Calculate d(distance) / d(x) and ... / d(y) for a coordinate reference system at specified locations. Args: @@ -963,8 +950,7 @@ def _crs_distance_differentials(crs, x, y): def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): - """ - Transform distance vectors from one coordinate reference system to + """Transform distance vectors from one coordinate reference system to another, preserving magnitude and physical direction. Args: @@ -996,8 +982,7 @@ def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, dy2): - """ - Return a mask that can be applied to data array to mask elements + """Return a mask that can be applied to data array to mask elements where the magnitude of vectors are not preserved due to numerical errors introduced by the transformation between coordinate systems. @@ -1041,8 +1026,7 @@ def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, def rotate_winds(u_cube, v_cube, target_cs): - r""" - Transform wind vectors to a different coordinate system. + r"""Transform wind vectors to a different coordinate system. The input cubes contain U and V components parallel to the local X and Y directions of the input grid at each point. diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index 719da18690..21c7d05943 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Various utilities related to geometric operations. 
+"""Various utilities related to geometric operations. .. note:: This module requires :mod:`shapely`. @@ -19,8 +18,7 @@ def _extract_relevant_cube_slice(cube, geometry): - """ - Given a shapely geometry object, this helper method returns + """Given a shapely geometry object, this helper method returns the tuple (subcube, x_coord_of_subcube, y_coord_of_subcube, (min_x_index, min_y_index, max_x_index, max_y_index)) @@ -137,8 +135,7 @@ def _extract_relevant_cube_slice(cube, geometry): def geometry_area_weights(cube, geometry, normalize=False): - """ - Returns the array of weights corresponding to the area of overlap between + """Returns the array of weights corresponding to the area of overlap between the cells of cube's horizontal grid, and the given shapely geometry. The returned array is suitable for use with :const:`iris.analysis.MEAN`. diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 62db621ec3..1042b145de 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Basic mathematical and statistical operations. +"""Basic mathematical and statistical operations. """ @@ -33,8 +32,7 @@ @lru_cache(maxsize=128, typed=True) def _output_dtype(op, first_dtype, second_dtype=None, in_place=False): - """ - Get the numpy dtype corresponding to the result of applying a unary or + """Get the numpy dtype corresponding to the result of applying a unary or binary operation to arguments of specified dtype. Args: @@ -76,8 +74,7 @@ def _output_dtype(op, first_dtype, second_dtype=None, in_place=False): def _get_dtype(operand): - """ - Get the numpy dtype corresponding to the numeric data in the object + """Get the numpy dtype corresponding to the numeric data in the object provided. 
Args: @@ -94,8 +91,7 @@ def _get_dtype(operand): def abs(cube, in_place=False): - """ - Calculate the absolute values of the data in the Cube provided. + """Calculate the absolute values of the data in the Cube provided. Args: @@ -123,8 +119,7 @@ def abs(cube, in_place=False): def intersection_of_cubes(cube, other_cube): - """ - Return the two Cubes of intersection given two Cubes. + """Return the two Cubes of intersection given two Cubes. .. note:: The intersection of cubes function will ignore all single valued coordinates in checking the intersection. @@ -212,8 +207,7 @@ def _assert_is_cube(cube): @_lenient_client(services=SERVICES) def add(cube, other, dim=None, in_place=False): - """ - Calculate the sum of two cubes, or the sum of a cube and a coordinate or + """Calculate the sum of two cubes, or the sum of a cube and a coordinate or array or scalar value. When summing two cubes, they must both have the same coordinate systems and @@ -268,8 +262,7 @@ def add(cube, other, dim=None, in_place=False): @_lenient_client(services=SERVICES) def subtract(cube, other, dim=None, in_place=False): - """ - Calculate the difference between two cubes, or the difference between + """Calculate the difference between two cubes, or the difference between a cube and a coordinate or array or scalar value. When differencing two cubes, they must both have the same coordinate systems @@ -331,8 +324,7 @@ def _add_subtract_common( dim=None, in_place=False, ): - """ - Function which shares common code between addition and subtraction + """Function which shares common code between addition and subtraction of cubes. 
operation_function - function which does the operation @@ -375,8 +367,7 @@ def _add_subtract_common( @_lenient_client(services=SERVICES) def multiply(cube, other, dim=None, in_place=False): - """ - Calculate the product of two cubes, or the product of a cube and a coordinate + """Calculate the product of two cubes, or the product of a cube and a coordinate or array or scalar value. When multiplying two cubes, they must both have the same coordinate systems @@ -443,8 +434,7 @@ def multiply(cube, other, dim=None, in_place=False): def _inplace_common_checks(cube, other, math_op): - """ - Check whether an inplace math operation can take place between `cube` and + """Check whether an inplace math operation can take place between `cube` and `other`. It cannot if `cube` has integer data and `other` has float data as the operation will always produce float data that cannot be 'safely' cast back to the integer data of `cube`. @@ -463,8 +453,7 @@ def _inplace_common_checks(cube, other, math_op): @_lenient_client(services=SERVICES) def divide(cube, other, dim=None, in_place=False): - """ - Calculate the ratio of two cubes, or the ratio of a cube and a coordinate + """Calculate the ratio of two cubes, or the ratio of a cube and a coordinate or array or scalar value. When dividing a cube by another cube, they must both have the same coordinate @@ -537,8 +526,7 @@ def divide(cube, other, dim=None, in_place=False): def exponentiate(cube, exponent, in_place=False): - """ - Returns the result of the given cube to the power of a scalar. + """Returns the result of the given cube to the power of a scalar. Args: @@ -593,8 +581,7 @@ def power(data, out=None): def exp(cube, in_place=False): - """ - Calculate the exponential (exp(x)) of the cube. + """Calculate the exponential (exp(x)) of the cube. Args: @@ -628,8 +615,7 @@ def exp(cube, in_place=False): def log(cube, in_place=False): - """ - Calculate the natural logarithm (base-e logarithm) of the cube. 
+ """Calculate the natural logarithm (base-e logarithm) of the cube. Args: @@ -663,8 +649,7 @@ def log(cube, in_place=False): def log2(cube, in_place=False): - """ - Calculate the base-2 logarithm of the cube. + """Calculate the base-2 logarithm of the cube. Args: @@ -694,8 +679,7 @@ def log2(cube, in_place=False): def log10(cube, in_place=False): - """ - Calculate the base-10 logarithm of the cube. + """Calculate the base-10 logarithm of the cube. Args: @@ -725,8 +709,7 @@ def log10(cube, in_place=False): def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False): - """ - Apply a `numpy universal function + """Apply a `numpy universal function `_ to a cube or pair of cubes. @@ -844,8 +827,7 @@ def _binary_op_common( in_place=False, sanitise_metadata=True, ): - """ - Function which shares common code between binary operations. + """Function which shares common code between binary operations. operation_function - function which does the operation (e.g. numpy.divide) @@ -988,8 +970,7 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): def _sanitise_metadata(cube, unit): - """ - As part of the maths metadata contract, clear the necessary or + """As part of the maths metadata contract, clear the necessary or unsupported metadata from the resultant cube of the maths operation. """ @@ -1063,13 +1044,10 @@ def _math_op_common( class IFunc: - """ - :class:`IFunc` class for functions that can be applied to an iris cube. - """ + """:class:`IFunc` class for functions that can be applied to an iris cube.""" def __init__(self, data_func, units_func): - """ - Create an ifunc from a data function and units function. + """Create an ifunc from a data function and units function. Args: @@ -1192,8 +1170,7 @@ def __call__( new_name=None, **kwargs_data_func, ): - """ - Applies the ifunc to the cube(s). + """Applies the ifunc to the cube(s). 
Args: diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 530be13391..e3a01f6933 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Statistical operations between cubes. +"""Statistical operations between cubes. """ @@ -22,8 +21,7 @@ def pearsonr( mdtol=1.0, common_mask=False, ): - """ - Calculate the Pearson's r correlation coefficient over specified + """Calculate the Pearson's r correlation coefficient over specified dimensions. Args: diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 42f47abf12..53dcc0ceac 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Defines a Trajectory class, and a routine to extract a sub-cube along a +"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory. """ @@ -39,8 +38,7 @@ class Trajectory: """A series of given waypoints with pre-calculated sample points.""" def __init__(self, waypoints, sample_count=10): - """ - Defines a trajectory using a sequence of waypoints. + """Defines a trajectory using a sequence of waypoints. For example:: @@ -116,8 +114,7 @@ def __repr__(self): ) def _get_interp_points(self): - """ - Translate `self.sampled_points` to the format expected by the + """Translate `self.sampled_points` to the format expected by the interpolator. Returns: @@ -132,8 +129,7 @@ def _get_interp_points(self): return [(k, v) for k, v in points.items()] def _src_cube_anon_dims(self, cube): - """ - A helper method to locate the index of anonymous dimensions on the + """A helper method to locate the index of anonymous dimensions on the interpolation target, ``cube``. 
Returns: @@ -144,8 +140,7 @@ def _src_cube_anon_dims(self, cube): return list(set(range(cube.ndim)) - set(named_dims)) def interpolate(self, cube, method=None): - """ - Calls :func:`~iris.analysis.trajectory.interpolate` to interpolate + """Calls :func:`~iris.analysis.trajectory.interpolate` to interpolate ``cube`` on the defined trajectory. Assumes that the coordinate names supplied in the waypoints @@ -187,8 +182,7 @@ def interpolate(self, cube, method=None): def interpolate(cube, sample_points, method=None): - """ - Extract a sub-cube at the given n-dimensional points. + """Extract a sub-cube at the given n-dimensional points. Args: @@ -487,8 +481,7 @@ def _ll_to_cart(lon, lat): def _cartesian_sample_points(sample_points, sample_point_coord_names): - """ - Replace geographic lat/lon with cartesian xyz. + """Replace geographic lat/lon with cartesian xyz. Generates coords suitable for nearest point calculations with `scipy.spatial.cKDTree`. @@ -538,8 +531,7 @@ def _cartesian_sample_points(sample_points, sample_point_coord_names): def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): - """ - Returns the indices to select the data value(s) closest to the given + """Returns the indices to select the data value(s) closest to the given coordinate point values. 'sample_points' is of the form [[coord-or-coord-name, point-value(s)]*]. @@ -706,8 +698,7 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): class UnstructuredNearestNeigbourRegridder: - """ - Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate` + """Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate` with given source and target grids. This is the type used by the :class:`~iris.analysis.UnstructuredNearest` @@ -718,8 +709,7 @@ class UnstructuredNearestNeigbourRegridder: # TODO: cache the necessary bits of the operation so reuse can actually # be more efficient. 
def __init__(self, src_cube, target_grid_cube): - """ - A nearest-neighbour regridder to perform regridding from the source + """A nearest-neighbour regridder to perform regridding from the source grid to the target grid. This can then be applied to any source data with the same structure as diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py index 8526c549c3..983238f17d 100644 --- a/lib/iris/common/__init__.py +++ b/lib/iris/common/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -A package for provisioning common Iris infrastructure. +"""A package for provisioning common Iris infrastructure. """ diff --git a/lib/iris/common/_split_attribute_dicts.py b/lib/iris/common/_split_attribute_dicts.py index 3927974053..95dbcbb7b3 100644 --- a/lib/iris/common/_split_attribute_dicts.py +++ b/lib/iris/common/_split_attribute_dicts.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute +"""Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute dictionaries. The idea here is to convert a split-dictionary into a "plain" one for calculations, @@ -22,8 +21,7 @@ def _convert_splitattrs_to_pairedkeys_dict(dic): - """ - Convert a split-attributes dictionary to a "normal" dict. + """Convert a split-attributes dictionary to a "normal" dict. Transform a :class:`~iris.cube.CubeAttributesDict` "split" attributes dictionary into a 'normal' :class:`dict`, with paired keys of the form ('global', name) or @@ -51,8 +49,7 @@ def _global_then_local_items(dic): def _convert_pairedkeys_dict_to_splitattrs(dic): - """ - Convert an input with global/local paired keys back into a split-attrs dict. 
+ """Convert an input with global/local paired keys back into a split-attrs dict. For now, this is always and only a :class:`iris.cube.CubeAttrsDict`. """ @@ -70,8 +67,7 @@ def _convert_pairedkeys_dict_to_splitattrs(dic): def adjust_for_split_attribute_dictionaries(operation): - """ - Decorator to make a function of attribute-dictionaries work with split attributes. + """Decorator to make a function of attribute-dictionaries work with split attributes. The wrapped function of attribute-dictionaries is currently always one of "equals", "combine" or "difference", with signatures like : diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index f2e3ec588b..614060b9bf 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides the infrastructure to support lenient client/service behaviour. +"""Provides the infrastructure to support lenient client/service behaviour. """ @@ -31,8 +30,7 @@ def _lenient_client(*dargs, services=None): - """ - Decorator that allows a client function/method to declare at runtime that + """Decorator that allows a client function/method to declare at runtime that it is executing and requires lenient behaviour from a prior registered lenient service function/method. @@ -93,8 +91,7 @@ def func(): @wraps(func) def lenient_client_inner_naked(*args, **kwargs): - """ - Closure wrapper function to register the wrapped function/method + """Closure wrapper function to register the wrapped function/method as active at runtime before executing it. 
""" @@ -114,8 +111,7 @@ def lenient_client_inner_naked(*args, **kwargs): def lenient_client_outer(func): @wraps(func) def lenient_client_inner(*args, **kwargs): - """ - Closure wrapper function to register the wrapped function/method + """Closure wrapper function to register the wrapped function/method as active at runtime before executing it. """ @@ -131,8 +127,7 @@ def lenient_client_inner(*args, **kwargs): def _lenient_service(*dargs): - """ - Decorator that allows a function/method to declare that it supports lenient + """Decorator that allows a function/method to declare that it supports lenient behaviour as a service. Registration is at Python interpreter parse time. @@ -198,8 +193,7 @@ def lenient_service_outer(func): def _qualname(func): - """ - Return the fully qualified function/method string name. + """Return the fully qualified function/method string name. Args: @@ -222,8 +216,7 @@ def _qualname(func): class Lenient(threading.local): def __init__(self, **kwargs): - """ - A container for managing the run-time lenient features and options. + """A container for managing the run-time lenient features and options. Kwargs: @@ -287,8 +280,7 @@ def _init(self): @contextmanager def context(self, **kwargs): - """ - Return a context manager which allows temporary modification of the + """Return a context manager which allows temporary modification of the lenient option state within the scope of the context manager. On entry to the context manager, all provided keyword arguments are @@ -325,8 +317,7 @@ def configure_state(state): class _Lenient(threading.local): def __init__(self, *args, **kwargs): - """ - A container for managing the run-time lenient services and client + """A container for managing the run-time lenient services and client options for pre-defined functions/methods. 
Args: @@ -361,8 +352,7 @@ def __init__(self, *args, **kwargs): self.register_client(client, services) def __call__(self, func): - """ - Determine whether it is valid for the function/method to provide a + """Determine whether it is valid for the function/method to provide a lenient service at runtime to the actively executing lenient client. Args: @@ -440,8 +430,7 @@ def __setitem__(self, name, value): @contextmanager def context(self, *args, **kwargs): - """ - Return a context manager which allows temporary modification of + """Return a context manager which allows temporary modification of the lenient option state for the active thread. On entry to the context manager, all provided keyword arguments are @@ -515,8 +504,7 @@ def enable(self): @enable.setter def enable(self, state): - """ - Set the activate state of the lenient services. + """Set the activate state of the lenient services. Setting the state to `False` disables all lenient services, and setting the state to `True` enables all lenient services. @@ -534,8 +522,7 @@ def enable(self, state): self.__dict__["enable"] = state def register_client(self, func, services, append=False): - """ - Add the provided mapping of lenient client function/method to + """Add the provided mapping of lenient client function/method to required lenient service function/methods. Args: @@ -578,8 +565,7 @@ def register_client(self, func, services, append=False): self.__dict__[func] = services def register_service(self, func): - """ - Add the provided function/method as providing a lenient service and + """Add the provided function/method as providing a lenient service and activate it. Args: @@ -600,8 +586,7 @@ def register_service(self, func): self.__dict__[func] = True def unregister_client(self, func): - """ - Remove the provided function/method as a lenient client using lenient services. + """Remove the provided function/method as a lenient client using lenient services. 
Args: @@ -627,8 +612,7 @@ def unregister_client(self, func): raise ValueError(emsg) def unregister_service(self, func): - """ - Remove the provided function/method as providing a lenient service. + """Remove the provided function/method as providing a lenient service. Args: diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index aaebcdf66e..3b2e909e9e 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides the infrastructure to support the common metadata API. +"""Provides the infrastructure to support the common metadata API. """ @@ -51,8 +50,7 @@ def hexdigest(item): - """ - Calculate a hexadecimal string hash representation of the provided item. + """Calculate a hexadecimal string hash representation of the provided item. Calculates a 64-bit non-cryptographic hash of the provided item, using the extremely fast ``xxhash`` hashing algorithm, and returns the hexdigest @@ -96,8 +94,7 @@ def hexdigest(item): class _NamedTupleMeta(ABCMeta): - """ - Meta-class to support the convenience of creating a namedtuple from + """Meta-class to support the convenience of creating a namedtuple from names/members of the metadata class hierarchy. """ @@ -139,10 +136,7 @@ def __new__(mcs, name, bases, namespace): class BaseMetadata(metaclass=_NamedTupleMeta): - """ - Container for common metadata. - - """ + """Container for common metadata.""" DEFAULT_NAME = "unknown" # the fall-back name for metadata identity @@ -158,8 +152,7 @@ class BaseMetadata(metaclass=_NamedTupleMeta): @lenient_service def __eq__(self, other): - """ - Determine whether the associated metadata members are equivalent. + """Determine whether the associated metadata members are equivalent. 
Args: @@ -245,8 +238,7 @@ def __str__(self): return f"{type(self).__name__}({', '.join(field_strings)})" def _api_common(self, other, func_service, func_operation, action, lenient=None): - """ - Common entry-point for lenient metadata API methods. + """Common entry-point for lenient metadata API methods. Args: @@ -318,8 +310,7 @@ def func(field): return values def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members. + """Perform lenient combination of metadata members. Args: @@ -400,8 +391,7 @@ def _combine_strict_attributes(left, right): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members. + """Perform lenient equality of metadata members. Args: @@ -492,8 +482,7 @@ def func(field): return values def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members. + """Perform lenient difference of metadata members. Args: @@ -587,8 +576,7 @@ def _is_attributes(field, left, right): @lenient_service def combine(self, other, lenient=None): - """ - Return a new metadata instance created by combining each of the + """Return a new metadata instance created by combining each of the associated metadata members. Args: @@ -613,8 +601,7 @@ def combine(self, other, lenient=None): @lenient_service def difference(self, other, lenient=None): - """ - Return a new metadata instance created by performing a difference + """Return a new metadata instance created by performing a difference comparison between each of the associated metadata members. A metadata member returned with a value of "None" indicates that there @@ -646,8 +633,7 @@ def difference(self, other, lenient=None): @lenient_service def equal(self, other, lenient=None): - """ - Determine whether the associated metadata members are equivalent. + """Determine whether the associated metadata members are equivalent. 
Args: @@ -671,8 +657,7 @@ def equal(self, other, lenient=None): @classmethod def from_metadata(cls, other): - """ - Convert the provided metadata instance from a different type + """Convert the provided metadata instance from a different type to this metadata type, using only the relevant metadata members. Non-common metadata members are set to ``None``. @@ -699,8 +684,7 @@ def from_metadata(cls, other): return result def name(self, default=None, token=False): - """ - Returns a string name representing the identity of the metadata. + """Returns a string name representing the identity of the metadata. First it tries standard name, then it tries the long name, then the NetCDF variable name, before falling-back to a default value, @@ -742,8 +726,7 @@ def _check(item): @classmethod def token(cls, name): - """ - Determine whether the provided name is a valid NetCDF name and thus + """Determine whether the provided name is a valid NetCDF name and thus safe to represent a single parsable token. Args: @@ -763,10 +746,7 @@ def token(cls, name): class AncillaryVariableMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.coords.AncillaryVariableMetadata`. - - """ + """Metadata container for a :class:`~iris.coords.AncillaryVariableMetadata`.""" __slots__ = () @@ -792,10 +772,7 @@ def equal(self, other, lenient=None): class CellMeasureMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.coords.CellMeasure`. - - """ + """Metadata container for a :class:`~iris.coords.CellMeasure`.""" _members = "measure" @@ -807,8 +784,7 @@ def __eq__(self, other): return super().__eq__(other) def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for cell measures. + """Perform lenient combination of metadata members for cell measures. Args: @@ -829,8 +805,7 @@ def _combine_lenient(self, other): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for cell measures. 
+ """Perform lenient equality of metadata members for cell measures. Args: @@ -851,8 +826,7 @@ def _compare_lenient(self, other): return result def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for cell measures. + """Perform lenient difference of metadata members for cell measures. Args: @@ -889,10 +863,7 @@ def equal(self, other, lenient=None): class CoordMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.coords.Coord`. - - """ + """Metadata container for a :class:`~iris.coords.Coord`.""" _members = ("coord_system", "climatological") @@ -935,8 +906,7 @@ def _sort_key(item): return _sort_key(self) < _sort_key(other) def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for coordinates. + """Perform lenient combination of metadata members for coordinates. Args: @@ -964,8 +934,7 @@ def func(field): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for coordinates. + """Perform lenient equality of metadata members for coordinates. Args: @@ -991,8 +960,7 @@ def _compare_lenient(self, other): return result def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for coordinates. + """Perform lenient difference of metadata members for coordinates. Args: @@ -1057,10 +1025,7 @@ def equal(self, other, lenient=None): class CubeMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.cube.Cube`. - - """ + """Metadata container for a :class:`~iris.cube.Cube`.""" _members = "cell_methods" @@ -1090,8 +1055,7 @@ def _sort_key(item): return _sort_key(self) < _sort_key(other) def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for cubes. + """Perform lenient combination of metadata members for cubes. 
Args: @@ -1111,8 +1075,7 @@ def _combine_lenient(self, other): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for cubes. + """Perform lenient equality of metadata members for cubes. Args: @@ -1131,8 +1094,7 @@ def _compare_lenient(self, other): return result def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for cubes. + """Perform lenient difference of metadata members for cubes. Args: @@ -1157,8 +1119,7 @@ def _difference_lenient(self, other): @property def _names(self): - """ - A tuple containing the value of each name participating in the identity + """A tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. This includes the standard name, long name, NetCDF variable name, and the STASH from the attributes dictionary. @@ -1268,10 +1229,7 @@ def _difference_strict_attributes(left, right): class DimCoordMetadata(CoordMetadata): - """ - Metadata container for a :class:`~iris.coords.DimCoord` - - """ + """Metadata container for a :class:`~iris.coords.DimCoord`""" # The "circular" member is stateful only, and does not participate # in lenient/strict equivalence. @@ -1372,8 +1330,7 @@ def metadata_filter( attributes=None, axis=None, ): - """ - Filter a collection of objects by their metadata to fit the given metadata + """Filter a collection of objects by their metadata to fit the given metadata criteria. Criteria can be either specific properties or other objects with metadata @@ -1537,8 +1494,7 @@ def __ne__(self, other): return match def __reduce__(self): - """ - Dynamically created classes at runtime cannot be pickled, due to not + """Dynamically created classes at runtime cannot be pickled, due to not being defined at the top level of a module. As a result, we require to use the __reduce__ interface to allow 'pickle' to recreate this class instance, and dump and load instance state successfully. 
@@ -1597,8 +1553,7 @@ def values(self): def metadata_manager_factory(cls, **kwargs): - """ - A class instance factory function responsible for manufacturing + """A class instance factory function responsible for manufacturing metadata instances dynamically at runtime. The factory instances returned by the factory are capable of managing diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 56b9263555..08ad224ad6 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides common metadata mixin behaviour. +"""Provides common metadata mixin behaviour. """ @@ -50,8 +49,7 @@ def _get_valid_standard_name(name): class LimitedAttributeDict(dict): - """ - A specialised 'dict' subclass, which forbids (errors) certain attribute names. + """A specialised 'dict' subclass, which forbids (errors) certain attribute names. Used for the attribute dictionaries of all Iris data objects (that is, :class:`CFVariableMixin` and its subclasses). @@ -147,8 +145,7 @@ def name(self, default=None, token=None): return self._metadata_manager.name(default=default, token=token) def rename(self, name): - """ - Changes the human-readable name. + """Changes the human-readable name. If 'name' is a valid standard name it will assign it to :attr:`standard_name`, otherwise it will assign it to diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 8c9794f7f8..045dc7b549 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Provides the infrastructure to support the analysis, identification and +"""Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` operands into a single resultant :class:`~iris.cube.Cube`, which will be auto-transposed, and with the appropriate broadcast shape. @@ -98,8 +97,7 @@ def create_coord(self, metadata): class Resolve: - """ - At present, :class:`~iris.common.resolve.Resolve` is used by Iris solely + """At present, :class:`~iris.common.resolve.Resolve` is used by Iris solely during cube maths to combine a left-hand :class:`~iris.cube.Cube` operand and a right-hand :class:`~iris.cube.Cube` operand into a resultant :class:`~iris.cube.Cube` with common metadata, suitably auto-transposed @@ -210,8 +208,7 @@ class Resolve: """ def __init__(self, lhs=None, rhs=None): - """ - Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and + """Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` :class:`~iris.cube.Cube` operand to determine the metadata that is common between them, and the auto-transposed, broadcast shape of the resultant :class:`~iris.cube.Cube`. @@ -326,8 +323,7 @@ def __init__(self, lhs=None, rhs=None): self(lhs, rhs) def __call__(self, lhs, rhs): - """ - Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` + """Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` :class:`~iris.cube.Cube` operand metadata. Involves determining all the common coordinate metadata shared between @@ -383,8 +379,7 @@ def __call__(self, lhs, rhs): return self def _as_compatible_cubes(self): - """ - Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can + """Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can be transposed and/or broadcast successfully together. 
If compatible, the ``_broadcast_shape`` of the resultant resolved cube is @@ -493,8 +488,7 @@ def _aux_coverage( common_aux_metadata, common_scalar_metadata, ): - """ - Determine the dimensions covered by each of the local and common + """Determine the dimensions covered by each of the local and common auxiliary coordinates of the provided :class:`~iris.cube.Cube`. The cube dimensions not covered by any of the auxiliary coordinates is @@ -564,8 +558,7 @@ def _aux_coverage( @staticmethod def _aux_mapping(src_coverage, tgt_coverage): - """ - Establish the mapping of dimensions from the ``src`` to ``tgt`` + """Establish the mapping of dimensions from the ``src`` to ``tgt`` :class:`~iris.cube.Cube` using the auxiliary coordinate metadata common between each of the operands. @@ -631,8 +624,7 @@ def _aux_mapping(src_coverage, tgt_coverage): @staticmethod def _categorise_items(cube): - """ - Inspect the provided :class:`~iris.cube.Cube` and group its + """Inspect the provided :class:`~iris.cube.Cube` and group its coordinates and associated metadata into dimension, auxiliary and scalar categories. @@ -679,8 +671,7 @@ def _create_prepared_item( bounds=None, container=None, ): - """ - Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` + """Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` containing the data and metadata required to construct and attach a coordinate to the resultant resolved cube. @@ -805,8 +796,7 @@ def _show(items, heading): @staticmethod def _dim_coverage(cube, cube_items_dim, common_dim_metadata): - """ - Determine the dimensions covered by each of the local and common + """Determine the dimensions covered by each of the local and common dimension coordinates of the provided :class:`~iris.cube.Cube`. 
The cube dimensions not covered by any of the dimension coordinates is @@ -857,8 +847,7 @@ def _dim_coverage(cube, cube_items_dim, common_dim_metadata): @staticmethod def _dim_mapping(src_coverage, tgt_coverage): - """ - Establish the mapping of dimensions from the ``src`` to ``tgt`` + """Establish the mapping of dimensions from the ``src`` to ``tgt`` :class:`~iris.cube.Cube` using the dimension coordinate metadata common between each of the operands. @@ -915,8 +904,7 @@ def _free_mapping( src_aux_coverage, tgt_aux_coverage, ): - """ - Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with + """Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with ``src`` to ``tgt`` :class:`~iris.cube.Cube` mappings from unmapped ``src`` dimensions that are free from coordinate metadata coverage to ``tgt`` dimensions that have local metadata coverage (i.e., is not common between @@ -1091,8 +1079,7 @@ def _pop(item, items): logger.debug(f"mapping free dimensions gives, mapping={self.mapping}") def _metadata_coverage(self): - """ - Using the pre-categorised metadata of the cubes, determine the dimensions + """Using the pre-categorised metadata of the cubes, determine the dimensions covered by their associated dimension and auxiliary coordinates, and which dimensions are free of metadata coverage. @@ -1138,8 +1125,7 @@ def _metadata_coverage(self): ) def _metadata_mapping(self): - """ - Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated + """Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated ``tgt`` :class:`~iris.cube.Cube` dimension using the common dim and aux coordinate metadata. 
If the common metadata does not result in a full mapping of ``src`` to ``tgt`` dimensions @@ -1247,8 +1233,7 @@ def _metadata_mapping(self): self._as_compatible_cubes() def _metadata_prepare(self): - """ - Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and + """Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and :attr:`~iris.common.resolve.Resolve.prepared_factories` with the necessary metadata to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1308,8 +1293,7 @@ def _metadata_prepare(self): self._prepare_factory_payload(src_cube, src_category_local) def _metadata_resolve(self): - """ - Categorise the coordinate metadata of the cubes into three distinct + """Categorise the coordinate metadata of the cubes into three distinct groups; metadata from coordinates only available (local) on the LHS cube, metadata from coordinates only available (local) on the RHS cube, and metadata from coordinates common to both the LHS and RHS @@ -1418,8 +1402,7 @@ def _prepare_common_aux_payload( prepared_items, ignore_mismatch=None, ): - """ - Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing + """Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each auxiliary coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. 
@@ -1547,8 +1530,7 @@ def _prepare_common_aux_payload( def _prepare_common_dim_payload( self, src_coverage, tgt_coverage, ignore_mismatch=None ): - """ - Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1609,8 +1591,7 @@ def _prepare_common_dim_payload( def _get_prepared_item( self, metadata, category_local, from_src=True, from_local=False ): - """ - Find the :attr:`~iris.common.resolve._PreparedItem` from the + """Find the :attr:`~iris.common.resolve._PreparedItem` from the :attr:`~iris.common.resolve.Resolve.prepared_category` that matches the provided ``metadata``. Alternatively, the ``category_local`` is searched to find a :class:`~iris.common.resolve._Item` @@ -1689,8 +1670,7 @@ def _get_prepared_item( return result def _prepare_factory_payload(self, cube, category_local, from_src=True): - """ - Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a :class:`~iris.common.resolve._PreparedFactory` + """Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a :class:`~iris.common.resolve._PreparedFactory` containing the necessary metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. 
@@ -1768,8 +1748,7 @@ def _prepare_factory_payload(self, cube, category_local, from_src=True): logger.debug(dmsg) def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): - """ - Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local auxiliary coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1841,8 +1820,7 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): logger.debug(dmsg) def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): - """ - Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1918,8 +1896,7 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): self.prepared_category.items_dim.append(prepared_item) def _prepare_local_payload_scalar(self, src_aux_coverage, tgt_aux_coverage): - """ - Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local scalar coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. 
@@ -1972,8 +1949,7 @@ def _prepare_local_payload( tgt_dim_coverage, tgt_aux_coverage, ): - """ - Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a + """Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata from the ``src`` and/or ``tgt`` :class:`~iris.cube.Cube` for each coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -2005,8 +1981,7 @@ def _prepare_local_payload( def _prepare_points_and_bounds( self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None ): - """ - Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure + """Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure that they are equivalent, taking into account broadcasting when appropriate. .. note:: @@ -2283,8 +2258,7 @@ def _tgt_cube_prepare(self, data): cube.remove_ancillary_variable(av) def cube(self, data, in_place=False): - """ - Create the resultant :class:`~iris.cube.Cube` from the resolved ``lhs`` + """Create the resultant :class:`~iris.cube.Cube` from the resolved ``lhs`` and ``rhs`` :class:`~iris.cube.Cube` operands, using the provided ``data``. @@ -2449,8 +2423,7 @@ def cube(self, data, in_place=False): @property def mapped(self): - """ - Boolean state representing whether **all** ``src`` :class:`~iris.cube.Cube` + """Boolean state representing whether **all** ``src`` :class:`~iris.cube.Cube` dimensions have been associated with relevant ``tgt`` :class:`~iris.cube.Cube` dimensions. @@ -2528,8 +2501,7 @@ def mapped(self): @property def shape(self): - """ - Proposed shape of the final resolved cube given the ``lhs`` + """Proposed shape of the final resolved cube given the ``lhs`` :class:`~iris.cube.Cube` operand and the ``rhs`` :class:`~iris.cube.Cube` operand. 
diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 2515274b8e..dbf27ea86e 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Cube functions for coordinate categorisation. +"""Cube functions for coordinate categorisation. All the functions provided here add a new coordinate to a cube. * The function :func:`add_categorised_coord` performs a generic @@ -24,8 +23,7 @@ def add_categorised_coord(cube, name, from_coord, category_function, units="1"): - """ - Add a new coordinate to a cube, by categorising an existing one. + """Add a new coordinate to a cube, by categorising an existing one. Make a new :class:`iris.coords.AuxCoord` from mapped values, and add it to the cube. @@ -90,8 +88,7 @@ def vectorised_fn(*args): # Private "helper" function def _pt_date(coord, time): - """ - Return the datetime of a time-coordinate point. + """Return the datetime of a time-coordinate point. Args: @@ -151,8 +148,7 @@ def add_day_of_month(cube, coord, name="day_of_month"): def add_day_of_year(cube, coord, name="day_of_year"): - """ - Add a categorical day-of-year coordinate, values 1..365 + """Add a categorical day-of-year coordinate, values 1..365 (1..366 in leap years). """ @@ -212,8 +208,7 @@ def add_hour(cube, coord, name="hour"): def _months_in_season(season): - """ - Returns a list of month numbers corresponding to each month in the + """Returns a list of month numbers corresponding to each month in the given season. """ @@ -263,8 +258,7 @@ def _validate_seasons(seasons): def _month_year_adjusts(seasons, use_year_at_season_start=False): - """ - Compute the year adjustments required for each month. + """Compute the year adjustments required for each month. 
These adjustments ensure that no season spans two years by assigning months to the **next** year (use_year_at_season_start is False) or the @@ -307,8 +301,7 @@ def _month_season_numbers(seasons): def add_season(cube, coord, name="season", seasons=("djf", "mam", "jja", "son")): - """ - Add a categorical season-of-year coordinate, with user specified + """Add a categorical season-of-year coordinate, with user specified seasons. Args: @@ -347,8 +340,7 @@ def _season(coord, value): def add_season_number( cube, coord, name="season_number", seasons=("djf", "mam", "jja", "son") ): - """ - Add a categorical season-of-year coordinate, values 0..N-1 where + """Add a categorical season-of-year coordinate, values 0..N-1 where N is the number of user specified seasons. Args: @@ -391,8 +383,7 @@ def add_season_year( seasons=("djf", "mam", "jja", "son"), use_year_at_season_start=False, ): - """ - Add a categorical year-of-season coordinate, with user specified seasons. + """Add a categorical year-of-season coordinate, with user specified seasons. Parameters ---------- @@ -432,8 +423,7 @@ def _season_year(coord, value): def add_season_membership(cube, coord, season, name="season_membership"): - """ - Add a categorical season membership coordinate for a user specified + """Add a categorical season membership coordinate for a user specified season. The coordinate has the value True for every time that is within the diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index ca8bf173f8..0ecfae0fb7 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Definitions of coordinate systems. +"""Definitions of coordinate systems. """ @@ -49,16 +48,12 @@ def _float_or_None(arg): class CoordSystem(metaclass=ABCMeta): - """ - Abstract base class for coordinate systems. 
- - """ + """Abstract base class for coordinate systems.""" grid_mapping_name = None def __eq__(self, other): - """ - Override equality + """Override equality The `_globe` and `_crs` attributes are not compared because they are cached properties and completely derived from other attributes. The @@ -122,8 +117,7 @@ def _ellipsoid_to_globe(ellipsoid, globe_default): @abstractmethod def as_cartopy_crs(self): - """ - Return a cartopy CRS representing our native coordinate + """Return a cartopy CRS representing our native coordinate system. """ @@ -131,8 +125,7 @@ def as_cartopy_crs(self): @abstractmethod def as_cartopy_projection(self): - """ - Return a cartopy projection representing our native map. + """Return a cartopy projection representing our native map. This will be the same as the :func:`~CoordSystem.as_cartopy_crs` for map projections but for spherical coord systems (which are not map @@ -150,8 +143,7 @@ def as_cartopy_projection(self): class GeogCS(CoordSystem): - """ - A geographic (ellipsoidal) coordinate system, defined by the shape of + """A geographic (ellipsoidal) coordinate system, defined by the shape of the Earth and a prime meridian. """ @@ -164,8 +156,7 @@ def __init__( inverse_flattening=None, longitude_of_prime_meridian=None, ): - """ - Create a new GeogCS. + """Create a new GeogCS. Parameters ---------- @@ -340,8 +331,7 @@ def as_cartopy_globe(self): @cached_property def _globe(self): - """ - A representation of this CRS as a Cartopy Globe. + """A representation of this CRS as a Cartopy Globe. Note ---- @@ -363,8 +353,7 @@ class that invalidates the cache. @cached_property def _crs(self): - """ - A representation of this CRS as a Cartopy CRS. + """A representation of this CRS as a Cartopy CRS. Note ---- @@ -375,8 +364,7 @@ class that invalidates the cache. 
return ccrs.Geodetic(self._globe) def _wipe_cached_properties(self): - """ - Wipes the cached properties on the object as part of any update to a + """Wipes the cached properties on the object as part of any update to a value that invalidates the cache. """ try: @@ -397,8 +385,7 @@ def semi_major_axis(self): @semi_major_axis.setter def semi_major_axis(self, value): - """ - Setting this property to a different value invalidates the current datum + """Setting this property to a different value invalidates the current datum (if any) because a datum encodes a specific semi-major axis. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. """ @@ -417,8 +404,7 @@ def semi_minor_axis(self): @semi_minor_axis.setter def semi_minor_axis(self, value): - """ - Setting this property to a different value invalidates the current datum + """Setting this property to a different value invalidates the current datum (if any) because a datum encodes a specific semi-minor axis. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. """ @@ -437,8 +423,7 @@ def inverse_flattening(self): @inverse_flattening.setter def inverse_flattening(self, value): - """ - Setting this property to a different value does not affect the behaviour + """Setting this property to a different value does not affect the behaviour of this object any further than the value of this property. """ wmsg = ( @@ -460,8 +445,7 @@ def datum(self): @datum.setter def datum(self, value): - """ - Setting this property to a different value invalidates the current + """Setting this property to a different value invalidates the current values of the ellipsoid measurements because a datum encodes its own ellipse. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. @@ -490,10 +474,7 @@ def from_datum(cls, datum, longitude_of_prime_meridian=None): class RotatedGeogCS(CoordSystem): - """ - A coordinate system with rotated pole, on an optional :class:`GeogCS`. 
- - """ + """A coordinate system with rotated pole, on an optional :class:`GeogCS`.""" grid_mapping_name = "rotated_latitude_longitude" @@ -504,8 +485,7 @@ def __init__( north_pole_grid_longitude=None, ellipsoid=None, ): - """ - Constructs a coordinate system with rotated pole, on an + """Constructs a coordinate system with rotated pole, on an optional :class:`GeogCS`. Args: @@ -601,10 +581,7 @@ def as_cartopy_projection(self): class TransverseMercator(CoordSystem): - """ - A cylindrical map projection, with XY coordinates measured in metres. - - """ + """A cylindrical map projection, with XY coordinates measured in metres.""" grid_mapping_name = "transverse_mercator" @@ -617,8 +594,7 @@ def __init__( scale_factor_at_central_meridian=None, ellipsoid=None, ): - """ - Constructs a TransverseMercator object. + """Constructs a TransverseMercator object. Args: @@ -727,10 +703,7 @@ def as_cartopy_projection(self): class Orthographic(CoordSystem): - """ - An orthographic map projection. - - """ + """An orthographic map projection.""" grid_mapping_name = "orthographic" @@ -742,8 +715,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """ - Constructs an Orthographic coord system. + """Constructs an Orthographic coord system. Args: @@ -814,10 +786,7 @@ def as_cartopy_projection(self): class VerticalPerspective(CoordSystem): - """ - A vertical/near-side perspective satellite image map projection. - - """ + """A vertical/near-side perspective satellite image map projection.""" grid_mapping_name = "vertical_perspective" @@ -830,8 +799,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """ - Constructs a Vertical Perspective coord system. + """Constructs a Vertical Perspective coord system. Args: @@ -909,10 +877,7 @@ def as_cartopy_projection(self): class Geostationary(CoordSystem): - """ - A geostationary satellite image map projection. 
- - """ + """A geostationary satellite image map projection.""" grid_mapping_name = "geostationary" @@ -926,8 +891,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """ - Constructs a Geostationary coord system. + """Constructs a Geostationary coord system. Args: @@ -1017,10 +981,7 @@ def as_cartopy_projection(self): class Stereographic(CoordSystem): - """ - A stereographic map projection. - - """ + """A stereographic map projection.""" grid_mapping_name = "stereographic" @@ -1034,8 +995,7 @@ def __init__( ellipsoid=None, scale_factor_at_projection_origin=None, ): - """ - Constructs a Stereographic coord system. + """Constructs a Stereographic coord system. Parameters ---------- @@ -1135,10 +1095,7 @@ def as_cartopy_projection(self): class PolarStereographic(Stereographic): - """ - A subclass of the stereographic map projection centred on a pole. - - """ + """A subclass of the stereographic map projection centred on a pole.""" grid_mapping_name = "polar_stereographic" @@ -1152,8 +1109,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """ - Construct a Polar Stereographic coord system. + """Construct a Polar Stereographic coord system. Parameters ---------- @@ -1202,10 +1158,7 @@ def __repr__(self): class LambertConformal(CoordSystem): - """ - A coordinate system in the Lambert Conformal conic projection. - - """ + """A coordinate system in the Lambert Conformal conic projection.""" grid_mapping_name = "lambert_conformal_conic" @@ -1218,8 +1171,7 @@ def __init__( secant_latitudes=None, ellipsoid=None, ): - """ - Constructs a LambertConformal coord system. + """Constructs a LambertConformal coord system. Kwargs: @@ -1313,10 +1265,7 @@ def as_cartopy_projection(self): class Mercator(CoordSystem): - """ - A coordinate system in the Mercator projection. 
- - """ + """A coordinate system in the Mercator projection.""" grid_mapping_name = "mercator" @@ -1329,8 +1278,7 @@ def __init__( false_easting=None, false_northing=None, ): - """ - Constructs a Mercator coord system. + """Constructs a Mercator coord system. Kwargs: @@ -1423,10 +1371,7 @@ def as_cartopy_projection(self): class LambertAzimuthalEqualArea(CoordSystem): - """ - A coordinate system in the Lambert Azimuthal Equal Area projection. - - """ + """A coordinate system in the Lambert Azimuthal Equal Area projection.""" grid_mapping_name = "lambert_azimuthal_equal_area" @@ -1438,8 +1383,7 @@ def __init__( false_northing=None, ellipsoid=None, ): - """ - Constructs a Lambert Azimuthal Equal Area coord system. + """Constructs a Lambert Azimuthal Equal Area coord system. Kwargs: @@ -1507,10 +1451,7 @@ def as_cartopy_projection(self): class AlbersEqualArea(CoordSystem): - """ - A coordinate system in the Albers Conical Equal Area projection. - - """ + """A coordinate system in the Albers Conical Equal Area projection.""" grid_mapping_name = "albers_conical_equal_area" @@ -1523,8 +1464,7 @@ def __init__( standard_parallels=None, ellipsoid=None, ): - """ - Constructs a Albers Conical Equal Area coord system. + """Constructs a Albers Conical Equal Area coord system. Kwargs: @@ -1605,8 +1545,7 @@ def as_cartopy_projection(self): class ObliqueMercator(CoordSystem): - """ - A cylindrical map projection, with XY coordinates measured in metres. + """A cylindrical map projection, with XY coordinates measured in metres. Designed for regions not well suited to :class:`Mercator` or :class:`TransverseMercator`, as the positioning of the cylinder is more @@ -1630,8 +1569,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """ - Constructs an ObliqueMercator object. + """Constructs an ObliqueMercator object. 
Parameters ---------- @@ -1721,8 +1659,7 @@ def as_cartopy_projection(self): class RotatedMercator(ObliqueMercator): - """ - :class:`ObliqueMercator` with ``azimuth_of_central_line=90``. + """:class:`ObliqueMercator` with ``azimuth_of_central_line=90``. As noted in CF versions 1.10 and earlier: @@ -1747,8 +1684,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """ - Constructs a RotatedMercator object. + """Constructs a RotatedMercator object. Parameters ---------- diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 3aeef122f2..08bb5ee7d9 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Definitions of coordinates and other dimensional metadata. +"""Definitions of coordinates and other dimensional metadata. """ @@ -41,10 +40,7 @@ class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta): - """ - Superclass for dimensional metadata. - - """ + """Superclass for dimensional metadata.""" _MODE_ADD = 1 _MODE_SUB = 2 @@ -73,8 +69,7 @@ def __init__( units=None, attributes=None, ): - """ - Constructs a single dimensional metadata object. + """Constructs a single dimensional metadata object. Args: @@ -129,8 +124,7 @@ def __init__( self._bounds_dm = None # Only ever set on Coord-derived instances. def __getitem__(self, keys): - """ - Returns a new dimensional metadata whose values are obtained by + """Returns a new dimensional metadata whose values are obtained by conventional array indexing. .. note:: @@ -168,8 +162,7 @@ def __getitem__(self, keys): return new_metadata def copy(self, values=None): - """ - Returns a copy of this dimensional metadata object. + """Returns a copy of this dimensional metadata object. 
Kwargs: @@ -190,8 +183,7 @@ def copy(self, values=None): @abstractmethod def cube_dims(self, cube): - """ - Identify the cube dims of any _DimensionalMetadata object. + """Identify the cube dims of any _DimensionalMetadata object. Return the dimensions in the cube of a matching _DimensionalMetadata object, if any. @@ -248,15 +240,11 @@ def _values(self, values): self._values_dm.data = values def _lazy_values(self): - """ - Returns a lazy array representing the dimensional metadata values. - - """ + """Returns a lazy array representing the dimensional metadata values.""" return self._values_dm.lazy_data() def _core_values(self): - """ - The values array of this dimensional metadata which may be a NumPy + """The values array of this dimensional metadata which may be a NumPy array or a dask array. """ @@ -267,8 +255,7 @@ def _core_values(self): return result def _has_lazy_values(self): - """ - Returns a boolean indicating whether the metadata's values array is a + """Returns a boolean indicating whether the metadata's values array is a lazy dask array or not. """ @@ -284,8 +271,7 @@ def summary( convert_dates=True, _section_indices=None, ): - r""" - Make a printable text summary. + r"""Make a printable text summary. Parameters ---------- @@ -644,8 +630,7 @@ def __hash__(self): return hash(id(self)) def __binary_operator__(self, other, mode_constant): - """ - Common code which is called by add, sub, mul and div + """Common code which is called by add, sub, mul and div Mode constant is one of ADD, SUB, MUL, DIV, RDIV @@ -771,8 +756,7 @@ def pointwise_convert(values): self.units = unit def is_compatible(self, other, ignore=None): - """ - Return whether the current dimensional metadata object is compatible + """Return whether the current dimensional metadata object is compatible with another. 
""" @@ -793,8 +777,7 @@ def is_compatible(self, other, ignore=None): @property def dtype(self): - """ - The NumPy dtype of the current dimensional metadata object, as + """The NumPy dtype of the current dimensional metadata object, as specified by its values. """ @@ -802,16 +785,14 @@ def dtype(self): @property def ndim(self): - """ - Return the number of dimensions of the current dimensional metadata + """Return the number of dimensions of the current dimensional metadata object. """ return self._values_dm.ndim def has_bounds(self): - """ - Return a boolean indicating whether the current dimensional metadata + """Return a boolean indicating whether the current dimensional metadata object has a bounds array. """ @@ -825,8 +806,7 @@ def shape(self): return self._values_dm.shape def xml_element(self, doc): - """ - Create the :class:`xml.dom.minidom.Element` that describes this + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`_DimensionalMetadata`. Args: @@ -919,8 +899,7 @@ def _xml_array_repr(data): return result def _value_type_name(self): - """ - A simple, readable name for the data type of the dimensional metadata + """A simple, readable name for the data type of the dimensional metadata values. """ @@ -948,8 +927,7 @@ def __init__( units=None, attributes=None, ): - """ - Constructs a single ancillary variable. + """Constructs a single ancillary variable. Args: @@ -993,8 +971,7 @@ def data(self, data): self._values = data def lazy_data(self): - """ - Return a lazy array representing the ancillary variable's data. + """Return a lazy array representing the ancillary variable's data. Accessing this method will never cause the data values to be loaded. 
Similarly, calling methods on, or indexing, the returned Array @@ -1010,24 +987,21 @@ def lazy_data(self): return super()._lazy_values() def core_data(self): - """ - The data array at the core of this ancillary variable, which may be a + """The data array at the core of this ancillary variable, which may be a NumPy array or a dask array. """ return super()._core_values() def has_lazy_data(self): - """ - Return a boolean indicating whether the ancillary variable's data array + """Return a boolean indicating whether the ancillary variable's data array is a lazy dask array or not. """ return super()._has_lazy_values() def cube_dims(self, cube): - """ - Return the cube dimensions of this AncillaryVariable. + """Return the cube dimensions of this AncillaryVariable. Equivalent to "cube.ancillary_variable_dims(self)". @@ -1036,8 +1010,7 @@ def cube_dims(self, cube): class CellMeasure(AncillaryVariable): - """ - A CF Cell Measure, providing area or volume properties of a cell + """A CF Cell Measure, providing area or volume properties of a cell where these cannot be inferred from the Coordinates and Coordinate Reference System. @@ -1053,8 +1026,7 @@ def __init__( attributes=None, measure=None, ): - """ - Constructs a single cell measure. + """Constructs a single cell measure. Args: @@ -1111,8 +1083,7 @@ def measure(self, measure): self._metadata_manager.measure = measure def cube_dims(self, cube): - """ - Return the cube dimensions of this CellMeasure. + """Return the cube dimensions of this CellMeasure. Equivalent to "cube.cell_measure_dims(self)". @@ -1120,8 +1091,7 @@ def cube_dims(self, cube): return cube.cell_measure_dims(self) def xml_element(self, doc): - """ - Create the :class:`xml.dom.minidom.Element` that describes this + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`CellMeasure`. 
Args: @@ -1166,8 +1136,7 @@ def __new__( min_inclusive=True, max_inclusive=True, ): - """ - Create a CoordExtent for the specified coordinate and range of + """Create a CoordExtent for the specified coordinate and range of values. Args: @@ -1210,8 +1179,7 @@ def __new__( def _get_2d_coord_bound_grid(bounds): - """ - Creates a grid using the bounds of a 2D coordinate with 4 sided cells. + """Creates a grid using the bounds of a 2D coordinate with 4 sided cells. Assumes that the four vertices of the cells are in an anti-clockwise order (bottom-left, bottom-right, top-right, top-left). @@ -1253,8 +1221,7 @@ def _get_2d_coord_bound_grid(bounds): class Cell(namedtuple("Cell", ["point", "bound"])): - """ - An immutable representation of a single cell of a coordinate, including the + """An immutable representation of a single cell of a coordinate, including the sample point and/or boundary position. Notes on cell comparison: @@ -1290,10 +1257,7 @@ class Cell(namedtuple("Cell", ["point", "bound"])): __array_priority__ = 100 def __new__(cls, point=None, bound=None): - """ - Construct a Cell from point or point-and-bound information. - - """ + """Construct a Cell from point or point-and-bound information.""" if point is None: raise ValueError("Point must be defined.") @@ -1341,8 +1305,7 @@ def __hash__(self): return hash((self.point, bound)) def __eq__(self, other): - """ - Compares Cell equality depending on the type of the object to be + """Compares Cell equality depending on the type of the object to be compared. """ @@ -1372,8 +1335,7 @@ def __ne__(self, other): return result def __common_cmp__(self, other, operator_method): - """ - Common method called by the rich comparison operators. The method of + """Common method called by the rich comparison operators. The method of checking equality depends on the type of the object to be compared. Cell vs Cell comparison is used to define a strict order. 
@@ -1479,8 +1441,7 @@ def __str__(self): return str(self.point) def contains_point(self, point): - """ - For a bounded cell, returns whether the given point lies within the + """For a bounded cell, returns whether the given point lies within the bounds. .. note:: The test carried out is equivalent to min(bound) @@ -1493,10 +1454,7 @@ def contains_point(self, point): class Coord(_DimensionalMetadata): - """ - Abstract base class for coordinates. - - """ + """Abstract base class for coordinates.""" _values_array_name = "points" @@ -1513,8 +1471,7 @@ def __init__( coord_system=None, climatological=False, ): - """ - Coordinate abstract base class. As of ``v3.0.0`` you **cannot** create an instance of :class:`Coord`. + """Coordinate abstract base class. As of ``v3.0.0`` you **cannot** create an instance of :class:`Coord`. Args: @@ -1582,8 +1539,7 @@ def __init__( self._ignore_axis = DEFAULT_IGNORE_AXIS def copy(self, points=None, bounds=None): - """ - Returns a copy of this coordinate. + """Returns a copy of this coordinate. Kwargs: @@ -1654,8 +1610,7 @@ def points(self, points): @property def bounds(self): - """ - The coordinate bounds values, as a NumPy array, + """The coordinate bounds values, as a NumPy array, or None if no bound values are defined. .. note:: The shape of the bound array should be: ``points.shape + @@ -1694,8 +1649,7 @@ def coord_system(self, value): @property def climatological(self): - """ - A boolean that controls whether the coordinate is a climatological + """A boolean that controls whether the coordinate is a climatological time axis, in which case the bounds represent a climatological period rather than a normal period. @@ -1730,8 +1684,7 @@ def climatological(self, value): @property def ignore_axis(self): - """ - A boolean that controls whether guess_coord_axis acts on this + """A boolean that controls whether guess_coord_axis acts on this coordinate. 
Defaults to False, and when set to True it will be skipped by @@ -1747,8 +1700,7 @@ def ignore_axis(self, value): self._ignore_axis = value def lazy_points(self): - """ - Return a lazy array representing the coord points. + """Return a lazy array representing the coord points. Accessing this method will never cause the points values to be loaded. Similarly, calling methods on, or indexing, the returned Array @@ -1764,8 +1716,7 @@ def lazy_points(self): return super()._lazy_values() def lazy_bounds(self): - """ - Return a lazy array representing the coord bounds. + """Return a lazy array representing the coord bounds. Accessing this method will never cause the bounds values to be loaded. Similarly, calling methods on, or indexing, the returned Array @@ -1785,16 +1736,14 @@ def lazy_bounds(self): return lazy_bounds def core_points(self): - """ - The points array at the core of this coord, which may be a NumPy array + """The points array at the core of this coord, which may be a NumPy array or a dask array. """ return super()._core_values() def core_bounds(self): - """ - The points array at the core of this coord, which may be a NumPy array + """The points array at the core of this coord, which may be a NumPy array or a dask array. """ @@ -1806,16 +1755,14 @@ def core_bounds(self): return result def has_lazy_points(self): - """ - Return a boolean indicating whether the coord's points array is a + """Return a boolean indicating whether the coord's points array is a lazy dask array or not. """ return super()._has_lazy_values() def has_lazy_bounds(self): - """ - Return a boolean indicating whether the coord's bounds array is a + """Return a boolean indicating whether the coord's bounds array is a lazy dask array or not. """ @@ -1834,8 +1781,7 @@ def __hash__(self): return hash(id(self)) def cube_dims(self, cube): - """ - Return the cube dimensions of this Coord. + """Return the cube dimensions of this Coord. Equivalent to "cube.coord_dims(self)". 
@@ -1843,8 +1789,7 @@ def cube_dims(self, cube): return cube.coord_dims(self) def convert_units(self, unit): - r""" - Change the coordinate's units, converting the values in its points + r"""Change the coordinate's units, converting the values in its points and bounds arrays. For example, if a coordinate's :attr:`~iris.coords.Coord.units` @@ -1863,8 +1808,7 @@ def convert_units(self, unit): super().convert_units(unit=unit) def cells(self): - """ - Returns an iterable of Cell instances for this Coord. + """Returns an iterable of Cell instances for this Coord. For example:: @@ -1914,8 +1858,7 @@ def _sanity_check_bounds(self): ) def _discontiguity_in_bounds(self, rtol=1e-5, atol=1e-8): - """ - Checks that the bounds of the coordinate are contiguous. + """Checks that the bounds of the coordinate are contiguous. Kwargs: * rtol: (float) @@ -2005,8 +1948,7 @@ def mod360_adjust(compare_axis): return contiguous, diffs def is_contiguous(self, rtol=1e-05, atol=1e-08): - """ - Return True if, and only if, this Coord is bounded with contiguous + """Return True if, and only if, this Coord is bounded with contiguous bounds to within the specified relative and absolute tolerances. 1D coords are contiguous if the upper bound of a cell aligns, @@ -2035,8 +1977,7 @@ def is_contiguous(self, rtol=1e-05, atol=1e-08): return contiguous def contiguous_bounds(self): - """ - Returns the N+1 bound values for a contiguous bounded 1D coordinate + """Returns the N+1 bound values for a contiguous bounded 1D coordinate of length N, or the (N+1, M+1) bound values for a contiguous bounded 2D coordinate of shape (N, M). @@ -2100,8 +2041,7 @@ def is_monotonic(self): return True def is_compatible(self, other, ignore=None): - """ - Return whether the coordinate is compatible with another. + """Return whether the coordinate is compatible with another. 
Compatibility is determined by comparing :meth:`iris.coords.Coord.name()`, :attr:`iris.coords.Coord.units`, @@ -2131,8 +2071,7 @@ def is_compatible(self, other, ignore=None): @property def bounds_dtype(self): - """ - The NumPy dtype of the coord's bounds. Will be `None` if the coord + """The NumPy dtype of the coord's bounds. Will be `None` if the coord does not have bounds. """ @@ -2143,10 +2082,7 @@ def bounds_dtype(self): @property def nbounds(self): - """ - Return the number of bounds that this coordinate has (0 for no bounds). - - """ + """Return the number of bounds that this coordinate has (0 for no bounds).""" nbounds = 0 if self.has_bounds(): nbounds = self._bounds_dm.shape[-1] @@ -2157,8 +2093,7 @@ def has_bounds(self): return self._bounds_dm is not None def cell(self, index): - """ - Return the single :class:`Cell` instance which results from slicing the + """Return the single :class:`Cell` instance which results from slicing the points/bounds with the given index. """ index = iris.util._build_full_slice_given_keys(index, self.ndim) @@ -2182,8 +2117,7 @@ def cell(self, index): return Cell(point, bound) def collapsed(self, dims_to_collapse=None): - """ - Returns a copy of this coordinate, which has been collapsed along + """Returns a copy of this coordinate, which has been collapsed along the specified dimensions. Replaces the points & bounds with a simple bounded region. @@ -2280,8 +2214,7 @@ def serialize(x): return coord def _guess_bounds(self, bound_position=0.5): - """ - Return bounds for this coordinate based on its points. + """Return bounds for this coordinate based on its points. Kwargs: @@ -2341,8 +2274,7 @@ def _guess_bounds(self, bound_position=0.5): return bounds def guess_bounds(self, bound_position=0.5): - """ - Add contiguous bounds to a coordinate, calculated from its points. + """Add contiguous bounds to a coordinate, calculated from its points. 
Puts a cell boundary at the specified fraction between each point and the next, plus extrapolated lowermost and uppermost bound points, so @@ -2374,8 +2306,7 @@ def guess_bounds(self, bound_position=0.5): self.bounds = self._guess_bounds(bound_position) def intersect(self, other, return_indices=False): - """ - Returns a new coordinate from the intersection of two coordinates. + """Returns a new coordinate from the intersection of two coordinates. Both coordinates must be compatible as defined by :meth:`~iris.coords.Coord.is_compatible`. @@ -2420,8 +2351,7 @@ def intersect(self, other, return_indices=False): return self[self_intersect_indices] def nearest_neighbour_index(self, point): - """ - Returns the index of the cell nearest to the given point. + """Returns the index of the cell nearest to the given point. Only works for one-dimensional coordinates. @@ -2516,8 +2446,7 @@ def nearest_neighbour_index(self, point): return result_index def xml_element(self, doc): - """ - Create the :class:`xml.dom.minidom.Element` that describes this + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`Coord`. Args: @@ -2551,8 +2480,7 @@ def _xml_id_extra(self, unique_value): class DimCoord(Coord): - """ - A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not + """A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not permitted in a :class:`DimCoord`. """ @@ -2573,8 +2501,7 @@ def from_regular( climatological=False, with_bounds=False, ): - """ - Create a :class:`DimCoord` with regularly spaced points, and + """Create a :class:`DimCoord` with regularly spaced points, and optionally bounds. The majority of the arguments are defined as for @@ -2635,8 +2562,7 @@ def __init__( circular=False, climatological=False, ): - """ - Create a 1D, numeric, and strictly monotonic coordinate with **immutable** points and bounds. 
+ """Create a 1D, numeric, and strictly monotonic coordinate with **immutable** points and bounds. Missing values are not permitted. @@ -2707,8 +2633,7 @@ def __init__( self.circular = circular def __deepcopy__(self, memo): - """ - coord.__deepcopy__() -> Deep copy of coordinate. + """coord.__deepcopy__() -> Deep copy of coordinate. Used if copy.deepcopy is called on a coordinate. @@ -2773,8 +2698,7 @@ def collapsed(self, dims_to_collapse=None): return coord def _new_points_requirements(self, points): - """ - Confirm that a new set of coord points adheres to the requirements for + """Confirm that a new set of coord points adheres to the requirements for :class:`~iris.coords.DimCoord` points, being: * points are scalar or 1D, * points are numeric, @@ -2820,8 +2744,7 @@ def _values(self, points): points.flags.writeable = False def _new_bounds_requirements(self, bounds): - """ - Confirm that a new set of coord bounds adheres to the requirements for + """Confirm that a new set of coord bounds adheres to the requirements for :class:`~iris.coords.DimCoord` bounds, being: * bounds are compatible in shape with the points * bounds are numeric, @@ -2914,8 +2837,7 @@ def is_monotonic(self): return True def xml_element(self, doc): - """ - Create the :class:`xml.dom.minidom.Element` that describes this + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`DimCoord`. Args: @@ -2935,14 +2857,10 @@ def xml_element(self, doc): class AuxCoord(Coord): - """ - A CF auxiliary coordinate. - - """ + """A CF auxiliary coordinate.""" def __init__(self, *args, **kwargs): - """ - Create a coordinate with **mutable** points and bounds. + """Create a coordinate with **mutable** points and bounds. Args: @@ -2997,10 +2915,7 @@ def __init__(self, *args, **kwargs): class CellMethod(iris.util._OrderedHashable): - """ - Represents a sub-cell pre-processing operation. 
- - """ + """Represents a sub-cell pre-processing operation.""" # Declare the attribute names relevant to the _OrderedHashable behaviour. _names = ("method", "coord_names", "intervals", "comments") @@ -3019,8 +2934,7 @@ class CellMethod(iris.util._OrderedHashable): comments = None def __init__(self, method, coords=None, intervals=None, comments=None): - """ - Args: + """Args: * method: The name of the operation. @@ -3104,8 +3018,7 @@ def __add__(self, other): return NotImplemented def xml_element(self, doc): - """ - Create the :class:`xml.dom.minidom.Element` that describes this + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`CellMethod`. Args: diff --git a/lib/iris/cube.py b/lib/iris/cube.py index c1f8069195..7e35893533 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Classes for representing multi-dimensional data with metadata. +"""Classes for representing multi-dimensional data with metadata. """ @@ -57,10 +56,7 @@ class _CubeFilter: - """ - A constraint, paired with a list of cubes matching that constraint. - - """ + """A constraint, paired with a list of cubes matching that constraint.""" def __init__(self, constraint, cubes=None): self.constraint = constraint @@ -72,8 +68,7 @@ def __len__(self): return len(self.cubes) def add(self, cube): - """ - Adds the appropriate (sub)cube to the list of cubes where it + """Adds the appropriate (sub)cube to the list of cubes where it matches the constraint. """ @@ -82,8 +77,7 @@ def add(self, cube): self.cubes.append(sub_cube) def merged(self, unique=False): - """ - Returns a new :class:`_CubeFilter` by merging the list of + """Returns a new :class:`_CubeFilter` by merging the list of cubes. Kwargs: @@ -97,15 +91,11 @@ def merged(self, unique=False): class _CubeFilterCollection: - """ - A list of _CubeFilter instances. 
- - """ + """A list of _CubeFilter instances.""" @staticmethod def from_cubes(cubes, constraints=None): - """ - Creates a new collection from an iterable of cubes, and some + """Creates a new collection from an iterable of cubes, and some optional constraints. """ @@ -120,8 +110,7 @@ def __init__(self, pairs): self.pairs = pairs def add_cube(self, cube): - """ - Adds the given :class:`~iris.cube.Cube` to all of the relevant + """Adds the given :class:`~iris.cube.Cube` to all of the relevant constraint pairs. """ @@ -129,8 +118,7 @@ def add_cube(self, cube): pair.add(cube) def cubes(self): - """ - Returns all the cubes in this collection concatenated into a + """Returns all the cubes in this collection concatenated into a single :class:`CubeList`. """ @@ -140,8 +128,7 @@ def cubes(self): return result def merged(self, unique=False): - """ - Returns a new :class:`_CubeFilterCollection` by merging all the cube + """Returns a new :class:`_CubeFilterCollection` by merging all the cube lists of this collection. Kwargs: @@ -155,8 +142,7 @@ def merged(self, unique=False): class CubeList(list): - """ - All the functionality of a standard :class:`list` with added "Cube" + """All the functionality of a standard :class:`list` with added "Cube" context. """ @@ -209,8 +195,7 @@ def __getitem__(self, keys): return result def __getslice__(self, start, stop): - """ - x.__getslice__(i, j) <==> x[i:j] + """x.__getslice__(i, j) <==> x[i:j] Use of negative indices is not supported. @@ -220,9 +205,7 @@ def __getslice__(self, start, stop): return result def __iadd__(self, other_cubes): - """ - Add a sequence of cubes to the cubelist in place. - """ + """Add a sequence of cubes to the cubelist in place.""" return super(CubeList, self).__iadd__(CubeList(other_cubes)) def __setitem__(self, key, cube_or_sequence): @@ -237,15 +220,12 @@ def __setitem__(self, key, cube_or_sequence): super(CubeList, self).__setitem__(key, cube_or_sequence) def append(self, cube): - """ - Append a cube. 
- """ + """Append a cube.""" self._assert_is_cube(cube) super(CubeList, self).append(cube) def extend(self, other_cubes): - """ - Extend cubelist by appending the cubes contained in other_cubes. + """Extend cubelist by appending the cubes contained in other_cubes. Args: @@ -255,9 +235,7 @@ def extend(self, other_cubes): super(CubeList, self).extend(CubeList(other_cubes)) def insert(self, index, cube): - """ - Insert a cube before index. - """ + """Insert a cube before index.""" self._assert_is_cube(cube) super(CubeList, self).insert(index, cube) @@ -282,8 +260,7 @@ def xml(self, checksum=False, order=True, byteorder=True): return doc.toprettyxml(indent=" ") def extract(self, constraints): - """ - Filter each of the cubes which can be filtered by the given + """Filter each of the cubes which can be filtered by the given constraints. This method iterates over each constraint given, and subsets each of @@ -300,8 +277,7 @@ def extract(self, constraints): return self._extract_and_merge(self, constraints, strict=False) def extract_cube(self, constraint): - """ - Extract a single cube from a CubeList, and return it. + """Extract a single cube from a CubeList, and return it. Raise an error if the extract produces no cubes, or more than one. Args: @@ -320,8 +296,7 @@ def extract_cube(self, constraint): ) def extract_cubes(self, constraints): - """ - Extract specific cubes from a CubeList, one for each given constraint. + """Extract specific cubes from a CubeList, one for each given constraint. Each constraint must produce exactly one cube, otherwise an error is raised. @@ -376,8 +351,7 @@ def _extract_and_merge(cubes, constraints, strict=False, return_single_cube=Fals return result def extract_overlapping(self, coord_names): - """ - Returns a :class:`CubeList` of cubes extracted over regions + """Returns a :class:`CubeList` of cubes extracted over regions where the coordinates overlap, for the coordinates in coord_names. 
@@ -404,8 +378,7 @@ def overlap_fn(cell): return self.extract(iris.Constraint(coord_values=coord_values)) def merge_cube(self): - """ - Return the merged contents of the :class:`CubeList` as a single + """Return the merged contents of the :class:`CubeList` as a single :class:`Cube`. If it is not possible to merge the `CubeList` into a single @@ -441,8 +414,7 @@ def merge_cube(self): return merged_cube def merge(self, unique=True): - """ - Returns the :class:`CubeList` resulting from merging this + """Returns the :class:`CubeList` resulting from merging this :class:`CubeList`. Kwargs: @@ -543,8 +515,7 @@ def concatenate_cube( check_ancils=True, check_derived_coords=True, ): - """ - Return the concatenated contents of the :class:`CubeList` as a single + """Return the concatenated contents of the :class:`CubeList` as a single :class:`Cube`. If it is not possible to concatenate the `CubeList` into a single @@ -623,8 +594,7 @@ def concatenate( check_ancils=True, check_derived_coords=True, ): - """ - Concatenate the cubes over their common dimensions. + """Concatenate the cubes over their common dimensions. Kwargs: @@ -741,8 +711,7 @@ def concatenate( ) def realise_data(self): - """ - Fetch 'real' data for all cubes, in a shared calculation. + """Fetch 'real' data for all cubes, in a shared calculation. This computes any lazy data, equivalent to accessing each `cube.data`. However, lazy calculations and data fetches can be shared between the @@ -768,16 +737,13 @@ def realise_data(self): _lazy.co_realise_cubes(*self) def copy(self): - """ - Return a CubeList when CubeList.copy() is called. - """ + """Return a CubeList when CubeList.copy() is called.""" if isinstance(self, CubeList): return deepcopy(self) def _is_single_item(testee): - """ - Return whether this is a single item, rather than an iterable. + """Return whether this is a single item, rather than an iterable. We count string types as 'single', also. 
@@ -786,8 +752,7 @@ def _is_single_item(testee): class CubeAttrsDict(MutableMapping): - """ - A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, + """A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, providing unified user access to combined cube "local" and "global" attributes dictionaries, with the access behaviour of an ordinary (single) dictionary. @@ -838,8 +803,7 @@ def __init__( locals: Optional[Mapping] = None, globals: Optional[Mapping] = None, ): - """ - Create a cube attributes dictionary. + """Create a cube attributes dictionary. We support initialisation from a single generic mapping input, using the default global/local assignment rules explained at :meth:`__setattr__`, or from @@ -964,8 +928,7 @@ def __eq__(self, other): # 'copy', 'update', '__ior__', '__or__', '__ror__' and 'fromkeys'. # def copy(self): - """ - Return a copy. + """Return a copy. Implemented with deep copying, consistent with general Iris usage. @@ -973,8 +936,7 @@ def copy(self): return CubeAttrsDict(self) def update(self, *args, **kwargs): - """ - Update by adding items from a mapping arg, or keyword-values. + """Update by adding items from a mapping arg, or keyword-values. If the argument is a split dictionary, preserve the local/global nature of its keys. @@ -1001,8 +963,7 @@ def __ior__(self, arg): return self def __ror__(self, arg): - """ - Implement 'ror' via 'update'. + """Implement 'ror' via 'update'. This needs to promote, such that the result is a CubeAttrsDict. """ @@ -1014,8 +975,7 @@ def __ror__(self, arg): @classmethod def fromkeys(cls, iterable, value=None): - """ - Create a new object with keys taken from an argument, all set to one value. + """Create a new object with keys taken from an argument, all set to one value. If the argument is a split dictionary, preserve the local/global nature of its keys. @@ -1039,8 +999,7 @@ def fromkeys(cls, iterable, value=None): # def __iter__(self): - """ - Define the combined iteration order. 
+ """Define the combined iteration order. Result is: all global keys, then all local ones, but omitting duplicates. @@ -1059,8 +1018,7 @@ def __len__(self): return len(list(iter(self))) def __getitem__(self, key): - """ - Fetch an item from the "combined attributes". + """Fetch an item from the "combined attributes". If the name is present in *both* ``self.locals`` and ``self.globals``, then the local value is returned. @@ -1073,8 +1031,7 @@ def __getitem__(self, key): return store[key] def __setitem__(self, key, value): - """ - Assign an attribute value. + """Assign an attribute value. This may be assigned in either ``self.locals`` or ``self.globals``, chosen as follows: @@ -1110,8 +1067,7 @@ def __setitem__(self, key, value): store[key] = value def __delitem__(self, key): - """ - Remove an attribute. + """Remove an attribute. Delete from both local + global. @@ -1132,8 +1088,7 @@ def __repr__(self): class Cube(CFVariableMixin): - """ - A single Iris cube of data and metadata. + """A single Iris cube of data and metadata. Typically obtained from :func:`iris.load`, :func:`iris.load_cube`, :func:`iris.load_cubes`, or from the manipulation of existing cubes. @@ -1173,8 +1128,7 @@ class Cube(CFVariableMixin): @classmethod def _sort_xml_attrs(cls, doc): - """ - Takes an xml document and returns a copy with all element + """Takes an xml document and returns a copy with all element attributes sorted in alphabetical order. This is a private utility method required by iris to maintain @@ -1239,8 +1193,7 @@ def __init__( cell_measures_and_dims=None, ancillary_variables_and_dims=None, ): - """ - Creates a cube with data and optional metadata. + """Creates a cube with data and optional metadata. Not typically used - normally cubes are obtained by loading data (e.g. :func:`iris.load`) or from manipulating existing cubes. 
@@ -1377,8 +1330,7 @@ def __init__( @property def _names(self): - """ - A tuple containing the value of each name participating in the identity + """A tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. This includes the standard name, long name, NetCDF variable name, and the STASH from the attributes dictionary. @@ -1395,16 +1347,14 @@ def attributes(self) -> CubeAttrsDict: @attributes.setter def attributes(self, attributes: Optional[Mapping]): - """ - An override to CfVariableMixin.attributes.setter, which ensures that Cube + """An override to CfVariableMixin.attributes.setter, which ensures that Cube attributes are stored in a way which distinguishes global + local ones. """ self._metadata_manager.attributes = CubeAttrsDict(attributes or {}) def _dimensional_metadata(self, name_or_dimensional_metadata): - """ - Return a single _DimensionalMetadata instance that matches the given + """Return a single _DimensionalMetadata instance that matches the given name_or_dimensional_metadata. If one is not found, raise an error. """ @@ -1425,8 +1375,7 @@ def _dimensional_metadata(self, name_or_dimensional_metadata): return found_item def is_compatible(self, other, ignore=None): - """ - Return whether the cube is compatible with another. + """Return whether the cube is compatible with another. Compatibility is determined by comparing :meth:`iris.cube.Cube.name()`, :attr:`iris.cube.Cube.units`, :attr:`iris.cube.Cube.cell_methods` and @@ -1477,8 +1426,7 @@ def is_compatible(self, other, ignore=None): return compatible def convert_units(self, unit): - """ - Change the cube's units, converting the values in the data array. + """Change the cube's units, converting the values in the data array. 
For example, if a cube's :attr:`~iris.cube.Cube.units` are kelvin then:: @@ -1519,8 +1467,7 @@ def add_cell_method(self, cell_method): self.cell_methods += (cell_method,) def add_aux_coord(self, coord, data_dims=None): - """ - Adds a CF auxiliary coordinate to the cube. + """Adds a CF auxiliary coordinate to the cube. Args: @@ -1627,8 +1574,7 @@ def _add_unique_aux_coord(self, coord, data_dims): self._aux_coords_and_dims.append((coord, data_dims)) def add_aux_factory(self, aux_factory): - """ - Adds an auxiliary coordinate factory to the cube. + """Adds an auxiliary coordinate factory to the cube. Args: @@ -1660,8 +1606,7 @@ def coordsonly(coords_and_dims): self._aux_factories.append(aux_factory) def add_cell_measure(self, cell_measure, data_dims=None): - """ - Adds a CF cell measure to the cube. + """Adds a CF cell measure to the cube. Args: @@ -1693,8 +1638,7 @@ def add_cell_measure(self, cell_measure, data_dims=None): ) def add_ancillary_variable(self, ancillary_variable, data_dims=None): - """ - Adds a CF ancillary variable to the cube. + """Adds a CF ancillary variable to the cube. Args: @@ -1724,8 +1668,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): ) def add_dim_coord(self, dim_coord, data_dim): - """ - Add a CF coordinate to the cube. + """Add a CF coordinate to the cube. Args: @@ -1809,8 +1752,7 @@ def _remove_coord(self, coord): self.remove_aux_factory(aux_factory) def remove_coord(self, coord): - """ - Removes a coordinate from the cube. + """Removes a coordinate from the cube. Args: @@ -1828,8 +1770,7 @@ def remove_coord(self, coord): factory.update(coord) def remove_cell_measure(self, cell_measure): - """ - Removes a cell measure from the cube. + """Removes a cell measure from the cube. Args: @@ -1864,8 +1805,7 @@ def remove_cell_measure(self, cell_measure): ] def remove_ancillary_variable(self, ancillary_variable): - """ - Removes an ancillary variable from the cube. + """Removes an ancillary variable from the cube. 
Args: @@ -1882,10 +1822,7 @@ def remove_ancillary_variable(self, ancillary_variable): ] def replace_coord(self, new_coord): - """ - Replace the coordinate whose metadata matches the given coordinate. - - """ + """Replace the coordinate whose metadata matches the given coordinate.""" old_coord = self.coord(new_coord) dims = self.coord_dims(old_coord) was_dimensioned = old_coord in self.dim_coords @@ -1899,8 +1836,7 @@ def replace_coord(self, new_coord): factory.update(old_coord, new_coord) def coord_dims(self, coord): - """ - Returns a tuple of the data dimensions relevant to the given + """Returns a tuple of the data dimensions relevant to the given coordinate. When searching for the given coordinate in the cube the comparison is @@ -1961,8 +1897,7 @@ def matcher(factory): return match def cell_measure_dims(self, cell_measure): - """ - Returns a tuple of the data dimensions relevant to the given + """Returns a tuple of the data dimensions relevant to the given CellMeasure. * cell_measure (string or CellMeasure) @@ -1983,8 +1918,7 @@ def cell_measure_dims(self, cell_measure): return matches[0] def ancillary_variable_dims(self, ancillary_variable): - """ - Returns a tuple of the data dimensions relevant to the given + """Returns a tuple of the data dimensions relevant to the given AncillaryVariable. * ancillary_variable (string or AncillaryVariable) @@ -2009,8 +1943,7 @@ def ancillary_variable_dims(self, ancillary_variable): return matches[0] def aux_factory(self, name=None, standard_name=None, long_name=None, var_name=None): - """ - Returns the single coordinate factory that matches the criteria, + """Returns the single coordinate factory that matches the criteria, or raises an error if not found. Kwargs: @@ -2085,8 +2018,7 @@ def coords( dim_coords=None, mesh_coords=None, ): - """ - Return a list of coordinates from the :class:`Cube` that match the + """Return a list of coordinates from the :class:`Cube` that match the provided criteria. .. 
seealso:: @@ -2252,8 +2184,7 @@ def coord( dim_coords=None, mesh_coords=None, ): - """ - Return a single coordinate from the :class:`Cube` that matches the + """Return a single coordinate from the :class:`Cube` that matches the provided criteria. .. note:: @@ -2370,8 +2301,7 @@ def coord( return coords[0] def coord_system(self, spec=None): - """ - Find the coordinate system of the given type. + """Find the coordinate system of the given type. If no target coordinate system is provided then find any available coordinate system. @@ -2429,8 +2359,7 @@ def _any_meshcoord(self): @property def mesh(self): - """ - Return the unstructured :class:`~iris.experimental.ugrid.Mesh` + """Return the unstructured :class:`~iris.experimental.ugrid.Mesh` associated with the cube, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2450,8 +2379,7 @@ def mesh(self): @property def location(self): - """ - Return the mesh "location" of the cube data, if the cube has any + """Return the mesh "location" of the cube data, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2470,8 +2398,7 @@ def location(self): return result def mesh_dim(self): - """ - Return the cube dimension of the mesh, if the cube has any + """Return the cube dimension of the mesh, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2489,8 +2416,7 @@ def mesh_dim(self): return result def cell_measures(self, name_or_cell_measure=None): - """ - Return a list of cell measures in this cube fitting the given criteria. + """Return a list of cell measures in this cube fitting the given criteria. 
Kwargs: @@ -2527,8 +2453,7 @@ def cell_measures(self, name_or_cell_measure=None): return cell_measures def cell_measure(self, name_or_cell_measure=None): - """ - Return a single cell_measure given the same arguments as + """Return a single cell_measure given the same arguments as :meth:`Cube.cell_measures`. .. note:: @@ -2575,8 +2500,7 @@ def cell_measure(self, name_or_cell_measure=None): return cell_measures[0] def ancillary_variables(self, name_or_ancillary_variable=None): - """ - Return a list of ancillary variable in this cube fitting the given + """Return a list of ancillary variable in this cube fitting the given criteria. Kwargs: @@ -2615,8 +2539,7 @@ def ancillary_variables(self, name_or_ancillary_variable=None): return ancillary_variables def ancillary_variable(self, name_or_ancillary_variable=None): - """ - Return a single ancillary_variable given the same arguments as + """Return a single ancillary_variable given the same arguments as :meth:`Cube.ancillary_variables`. .. note:: @@ -2667,8 +2590,7 @@ def ancillary_variable(self, name_or_ancillary_variable=None): @property def cell_methods(self): - """ - Tuple of :class:`iris.coords.CellMethod` representing the processing + """Tuple of :class:`iris.coords.CellMethod` representing the processing done on the phenomenon. """ @@ -2694,8 +2616,7 @@ def cell_methods(self, cell_methods: Iterable): self._metadata_manager.cell_methods = cell_methods def core_data(self): - """ - Retrieve the data array of this :class:`~iris.cube.Cube` in its + """Retrieve the data array of this :class:`~iris.cube.Cube` in its current state, which will either be real or lazy. If this :class:`~iris.cube.Cube` has lazy data, accessing its data @@ -2713,8 +2634,7 @@ def shape(self): @property def dtype(self): - """ - The data type of the values in the data array of this + """The data type of the values in the data array of this :class:`~iris.cube.Cube`. 
""" @@ -2726,8 +2646,7 @@ def ndim(self): return self._data_manager.ndim def lazy_data(self): - """ - Return a "lazy array" representing the Cube data. A lazy array + """Return a "lazy array" representing the Cube data. A lazy array describes an array whose data values have not been loaded into memory from disk. @@ -2749,8 +2668,7 @@ def lazy_data(self): @property def data(self): - """ - The :class:`numpy.ndarray` representing the multi-dimensional data of + """The :class:`numpy.ndarray` representing the multi-dimensional data of the cube. .. note:: @@ -2787,8 +2705,7 @@ def data(self, data): self._data_manager.data = data def has_lazy_data(self): - """ - Details whether this :class:`~iris.cube.Cube` has lazy data. + """Details whether this :class:`~iris.cube.Cube` has lazy data. Returns: Boolean. @@ -2798,8 +2715,7 @@ def has_lazy_data(self): @property def dim_coords(self): - """ - Return a tuple of all the dimension coordinates, ordered by dimension. + """Return a tuple of all the dimension coordinates, ordered by dimension. .. note:: @@ -2823,8 +2739,7 @@ def dim_coords(self): @property def aux_coords(self): - """ - Return a tuple of all the auxiliary coordinates, ordered by + """Return a tuple of all the auxiliary coordinates, ordered by dimension(s). """ @@ -2840,8 +2755,7 @@ def aux_coords(self): @property def derived_coords(self): - """ - Return a tuple of all the coordinates generated by the coordinate + """Return a tuple of all the coordinates generated by the coordinate factories. """ @@ -2858,8 +2772,7 @@ def aux_factories(self): return tuple(self._aux_factories) def summary(self, shorten=False, name_padding=35): - """ - String summary of the Cube with name+units, a list of dim coord names + """String summary of the Cube with name+units, a list of dim coord names versus length and, optionally, a summary of all other components. 
Kwargs: @@ -2896,8 +2809,7 @@ def _repr_html_(self): __iter__ = None def __getitem__(self, keys): - """ - Cube indexing (through use of square bracket notation) has been + """Cube indexing (through use of square bracket notation) has been implemented at the data level. That is, the indices provided to this method should be aligned to the data of the cube, and thus the indices requested must be applicable directly to the cube.data attribute. All @@ -3004,8 +2916,7 @@ def new_ancillary_variable_dims(av_): return cube def subset(self, coord): - """ - Get a subset of the cube by providing the desired resultant + """Get a subset of the cube by providing the desired resultant coordinate. If the coordinate provided applies to the whole cube; the whole cube is returned. As such, the operation is not strict. @@ -3054,8 +2965,7 @@ def subset(self, coord): return result def extract(self, constraint): - """ - Filter the cube by the given constraint using + """Filter the cube by the given constraint using :meth:`iris.Constraint.extract` method. """ @@ -3064,8 +2974,7 @@ def extract(self, constraint): return constraint.extract(self) def intersection(self, *args, **kwargs): - """ - Return the intersection of the cube with specified coordinate + """Return the intersection of the cube with specified coordinate ranges. Coordinate ranges can be specified as: @@ -3269,8 +3178,7 @@ def _intersect_derive_subset(self, coord, points, bounds, inside_indices): non_zero_step_indices = np.nonzero(step)[0] def dim_coord_subset(): - """ - Derive the subset for dimension coordinates. + """Derive the subset for dimension coordinates. Ensure that we do not wrap if blocks are at the very edge. That is, if the very edge is wrapped and corresponds to base + period, @@ -3437,9 +3345,7 @@ def _intersect_modulus( return subsets, points, bounds def _as_list_of_coords(self, names_or_coords): - """ - Convert a name, coord, or list of names/coords to a list of coords. 
- """ + """Convert a name, coord, or list of names/coords to a list of coords.""" # If not iterable, convert to list of a single item if _is_single_item(names_or_coords): names_or_coords = [names_or_coords] @@ -3461,8 +3367,7 @@ def _as_list_of_coords(self, names_or_coords): return coords def slices_over(self, ref_to_slice): - """ - Return an iterator of all subcubes along a given coordinate or + """Return an iterator of all subcubes along a given coordinate or dimension index, or multiple of these. Args: @@ -3517,8 +3422,7 @@ def slices_over(self, ref_to_slice): return self.slices(opposite_dims, ordered=False) def slices(self, ref_to_slice, ordered=True): - """ - Return an iterator of all subcubes given the coordinates or dimension + """Return an iterator of all subcubes given the coordinates or dimension indices desired to be present in each subcube. Args: @@ -3600,8 +3504,7 @@ def slices(self, ref_to_slice, ordered=True): return _SliceIterator(self, dims_index, dim_to_slice, ordered) def transpose(self, new_order=None): - """ - Re-order the data dimensions of the cube in-place. + """Re-order the data dimensions of the cube in-place. new_order - list of ints, optional By default, reverse the dimensions, otherwise permute the @@ -3659,10 +3562,7 @@ def remap_cube_metadata(metadata_and_dims): ) def xml(self, checksum=False, order=True, byteorder=True): - """ - Returns a fully valid CubeML string representation of the Cube. - - """ + """Returns a fully valid CubeML string representation of the Cube.""" doc = Document() cube_xml_element = self._xml_element( @@ -3835,8 +3735,7 @@ def _order(array): return cube_xml_element def copy(self, data=None): - """ - Returns a deep copy of this cube. + """Returns a deep copy of this cube. 
Kwargs: @@ -4003,8 +3902,7 @@ def __neg__(self): # END OPERATOR OVERLOADS def collapsed(self, coords, aggregator, **kwargs): - """ - Collapse one or more dimensions over the cube given the coordinate/s + """Collapse one or more dimensions over the cube given the coordinate/s and an aggregation. Examples of aggregations that may be used include @@ -4246,8 +4144,7 @@ def collapsed(self, coords, aggregator, **kwargs): return result def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): - """ - Perform aggregation over the cube given one or more "group coordinates". + """Perform aggregation over the cube given one or more "group coordinates". A "group coordinate" is a coordinate where repeating values represent a single group, such as a month coordinate on a daily time slice. Repeated @@ -4536,8 +4433,7 @@ def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): return aggregateby_cube def rolling_window(self, coord, aggregator, window, **kwargs): - """ - Perform rolling window aggregation on a cube given a coordinate, an + """Perform rolling window aggregation on a cube given a coordinate, an aggregation method and a window size. Args: @@ -4743,8 +4639,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): return result def interpolate(self, sample_points, scheme, collapse_scalar=True): - """ - Interpolate from this :class:`~iris.cube.Cube` to the given + """Interpolate from this :class:`~iris.cube.Cube` to the given sample points using the given interpolation scheme. Args: @@ -4830,8 +4725,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): return interp(points, collapse_scalar=collapse_scalar) def regrid(self, grid, scheme): - r""" - Regrid this :class:`~iris.cube.Cube` on to the given target `grid` + r"""Regrid this :class:`~iris.cube.Cube` on to the given target `grid` using the given regridding `scheme`. 
Args: @@ -4870,8 +4764,7 @@ def regrid(self, grid, scheme): class ClassDict(MutableMapping): - """ - A mapping that stores objects keyed on their superclasses and their names. + """A mapping that stores objects keyed on their superclasses and their names. The mapping has a root class, all stored objects must be a subclass of the root class. The superclasses used for an object include the class of the @@ -4946,8 +4839,7 @@ def keys(self): def sorted_axes(axes): - """ - Returns the axis names sorted alphabetically, with the exception that + """Returns the axis names sorted alphabetically, with the exception that 't', 'z', 'y', and, 'x' are sorted to the end. """ diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index ea788e7c18..8fad591658 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Exceptions specific to the Iris package. +"""Exceptions specific to the Iris package. """ @@ -70,8 +69,7 @@ class InvalidCubeError(IrisError): class ConstraintMismatchError(IrisError): - """ - Raised when a constraint operation has failed to find the correct number + """Raised when a constraint operation has failed to find the correct number of results. """ @@ -80,8 +78,7 @@ class ConstraintMismatchError(IrisError): class NotYetImplementedError(IrisError): - """ - Raised by missing functionality. + """Raised by missing functionality. Different meaning to NotImplementedError, which is for abstract methods. @@ -97,24 +94,19 @@ class TranslationError(IrisError): class IgnoreCubeException(IrisError): - """ - Raised from a callback function when a cube should be ignored on load. 
- - """ + """Raised from a callback function when a cube should be ignored on load.""" pass class ConcatenateError(IrisError): - """ - Raised when concatenate is expected to produce a single cube, but fails to + """Raised when concatenate is expected to produce a single cube, but fails to do so. """ def __init__(self, differences): - """ - Creates a ConcatenateError with a list of textual descriptions of + """Creates a ConcatenateError with a list of textual descriptions of the differences which prevented a concatenate. Args: @@ -132,15 +124,13 @@ def __str__(self): class MergeError(IrisError): - """ - Raised when merge is expected to produce a single cube, but fails to + """Raised when merge is expected to produce a single cube, but fails to do so. """ def __init__(self, differences): - """ - Creates a MergeError with a list of textual descriptions of + """Creates a MergeError with a list of textual descriptions of the differences which prevented a merge. Args: @@ -186,9 +176,7 @@ class CannotAddError(ValueError): class IrisUserWarning(UserWarning): - """ - Base class for :class:`UserWarning`\\ s generated by Iris. - """ + """Base class for :class:`UserWarning`\\ s generated by Iris.""" pass @@ -212,8 +200,7 @@ class IrisCfWarning(IrisUserWarning): class IrisIgnoringWarning(IrisUserWarning): - """ - Any warning that involves an Iris operation not using some information. + """Any warning that involves an Iris operation not using some information. E.g. :class:`~iris.aux_factory.AuxCoordFactory` generation disregarding bounds. @@ -223,8 +210,7 @@ class IrisIgnoringWarning(IrisUserWarning): class IrisDefaultingWarning(IrisUserWarning): - """ - Any warning that involves Iris changing invalid/missing information. + """Any warning that involves Iris changing invalid/missing information. E.g. creating a :class:`~iris.coords.AuxCoord` from an invalid :class:`~iris.coords.DimCoord` definition. 
@@ -246,8 +232,7 @@ class IrisUnsupportedPlottingWarning(IrisUserWarning): class IrisImpossibleUpdateWarning(IrisUserWarning): - """ - Warnings where it is not possible to update an object. + """Warnings where it is not possible to update an object. Mainly generated during regridding where the necessary information for updating an :class:`~iris.aux_factory.AuxCoordFactory` is no longer @@ -285,33 +270,25 @@ class IrisCfSaveWarning(IrisCfWarning, IrisSaveWarning): class IrisCfInvalidCoordParamWarning(IrisCfLoadWarning): - """ - Warnings where incorrect information for CF coord construction is in a file. - """ + """Warnings where incorrect information for CF coord construction is in a file.""" pass class IrisCfMissingVarWarning(IrisCfLoadWarning): - """ - Warnings where a CF variable references another variable that is not in the file. - """ + """Warnings where a CF variable references another variable that is not in the file.""" pass class IrisCfLabelVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): - """ - Warnings where a CF string/label variable is being used inappropriately. - """ + """Warnings where a CF string/label variable is being used inappropriately.""" pass class IrisCfNonSpanningVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): - """ - Warnings where a CF variable is ignored because it does not span the required dimension. - """ + """Warnings where a CF variable is ignored because it does not span the required dimension.""" pass @@ -320,48 +297,37 @@ class IrisCfNonSpanningVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): class IrisIgnoringBoundsWarning(IrisIgnoringWarning): - """ - Warnings where bounds information has not been used by an Iris operation. - """ + """Warnings where bounds information has not been used by an Iris operation.""" pass class IrisCannotAddWarning(IrisIgnoringWarning): - """ - Warnings where a member object cannot be added to a :class:`~iris.cube.Cube` . 
- """ + """Warnings where a member object cannot be added to a :class:`~iris.cube.Cube` .""" pass class IrisGuessBoundsWarning(IrisDefaultingWarning): - """ - Warnings where Iris has filled absent bounds information with a best estimate. - """ + """Warnings where Iris has filled absent bounds information with a best estimate.""" pass class IrisPpClimModifiedWarning(IrisSaveWarning, IrisDefaultingWarning): - """ - Warnings where a climatology has been modified while saving :term:`Post Processing (PP) Format` . - """ + """Warnings where a climatology has been modified while saving :term:`Post Processing (PP) Format` .""" pass class IrisFactoryCoordNotFoundWarning(IrisLoadWarning): - """ - Warnings where a referenced factory coord can not be found when loading a variable in :term:`NetCDF Format`. - """ + """Warnings where a referenced factory coord can not be found when loading a variable in :term:`NetCDF Format`.""" pass class IrisNimrodTranslationWarning(IrisLoadWarning): - """ - For unsupported vertical coord types in :mod:`iris.file_formats.nimrod_load_rules`. + """For unsupported vertical coord types in :mod:`iris.file_formats.nimrod_load_rules`. (Pre-dates the full categorisation of Iris UserWarnings). """ @@ -370,8 +336,7 @@ class IrisNimrodTranslationWarning(IrisLoadWarning): class IrisUnknownCellMethodWarning(IrisCfLoadWarning): - """ - If a loaded :class:`~iris.coords.CellMethod` is not one the method names known to Iris. + """If a loaded :class:`~iris.coords.CellMethod` is not one the method names known to Iris. (Pre-dates the full categorisation of Iris UserWarnings). """ @@ -380,8 +345,7 @@ class IrisUnknownCellMethodWarning(IrisCfLoadWarning): class IrisSaverFillValueWarning(IrisMaskValueMatchWarning, IrisSaveWarning): - """ - For fill value complications during Iris file saving :term:`NetCDF Format`. + """For fill value complications during Iris file saving :term:`NetCDF Format`. (Pre-dates the full categorisation of Iris UserWarnings). 
""" diff --git a/lib/iris/experimental/__init__.py b/lib/iris/experimental/__init__.py index 4c7c62b4f5..eea4259355 100644 --- a/lib/iris/experimental/__init__.py +++ b/lib/iris/experimental/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Experimental code can be introduced to Iris through this package. +"""Experimental code can be introduced to Iris through this package. Changes to experimental code may be more extensive than in the rest of the codebase. The code is expected to graduate, eventually, to "full status". diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index ba4e9e6050..b660ae2a1a 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Wrapper for animating iris cubes using iris or matplotlib plotting functions +"""Wrapper for animating iris cubes using iris or matplotlib plotting functions Notes ----- @@ -17,8 +16,7 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): - """ - Animates the given cube iterator. + """Animates the given cube iterator. Warnings -------- diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 7f824df690..16421947f6 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Experimental module for importing/exporting raster data from Iris cubes using +"""Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. See also: `GDAL - Geospatial Data Abstraction Library `_. 
@@ -42,8 +41,7 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, ftype): - """ - Use GDAL WriteArray to export data as a 32-bit raster image. + """Use GDAL WriteArray to export data as a 32-bit raster image. Requires the array data to be of the form: North-at-top and West-on-left. @@ -99,8 +97,7 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, f def export_geotiff(cube, fname): - """ - Writes cube data to raster file format as a PixelIsArea GeoTiff image. + """Writes cube data to raster file format as a PixelIsArea GeoTiff image. .. deprecated:: 3.2.0 diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 372fec7a9f..b0ebfc5679 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Regridding functions. +"""Regridding functions. .. note:: @@ -52,8 +51,7 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): - """ - Return a new cube with data values calculated using the area weighted + """Return a new cube with data values calculated using the area weighted mean of data values from src_grid regridded onto the horizontal grid of grid_cube. @@ -127,8 +125,7 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): - r""" - Return a new cube with the data values calculated using the weighted + r"""Return a new cube with the data values calculated using the weighted mean of data values from :data:`src_cube` and the weights from :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. 
@@ -204,8 +201,7 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): class PointInCell: - """ - This class describes the point-in-cell regridding scheme for use + """This class describes the point-in-cell regridding scheme for use typically with :meth:`iris.cube.Cube.regrid()`. .. warning:: @@ -218,8 +214,7 @@ class PointInCell: """ def __init__(self, weights=None): - """ - Point-in-cell regridding scheme suitable for regridding over one + """Point-in-cell regridding scheme suitable for regridding over one or more orthogonal coordinates. .. warning:: @@ -240,14 +235,10 @@ def __init__(self, weights=None): class _ProjectedUnstructuredRegridder: - """ - This class provides regridding that uses scipy.interpolate.griddata. - - """ + """This class provides regridding that uses scipy.interpolate.griddata.""" def __init__(self, src_cube, tgt_grid_cube, method, projection=None): - """ - Create a regridder for conversions between the source + """Create a regridder for conversions between the source and target grids. Args: @@ -347,10 +338,7 @@ def _regrid( projection, method, ): - """ - Regrids input data from the source to the target. Calculation is. - - """ + """Regrids input data from the source to the target. Calculation is.""" # Transform coordinates into the projection the interpolation will be # performed in. src_projection = src_x_coord.coord_system.as_cartopy_projection() @@ -404,8 +392,7 @@ def _create_cube( grid_y_coord, regrid_callback, ): - """ - Return a new Cube for the result of regridding the source Cube onto + """Return a new Cube for the result of regridding the source Cube onto the new grid. 
All the metadata and coordinates of the result Cube are copied from @@ -530,8 +517,7 @@ def regrid_reference_surface( return result def __call__(self, src_cube): - """ - Regrid this :class:`~iris.cube.Cube` on to the target grid of + """Regrid this :class:`~iris.cube.Cube` on to the target grid of this :class:`UnstructuredProjectedRegridder`. The given cube must be defined with the same grid as the source @@ -612,8 +598,7 @@ def __call__(self, src_cube): class ProjectedUnstructuredLinear: - """ - This class describes the linear regridding scheme which uses the + """This class describes the linear regridding scheme which uses the scipy.interpolate.griddata to regrid unstructured data on to a grid. The source cube and the target cube will be projected into a common @@ -622,8 +607,7 @@ class ProjectedUnstructuredLinear: """ def __init__(self, projection=None): - """ - Linear regridding scheme that uses scipy.interpolate.griddata on + """Linear regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. .. note:: @@ -653,8 +637,7 @@ def __init__(self, projection=None): warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): - """ - Creates a linear regridder to perform regridding, using + """Creates a linear regridder to perform regridding, using scipy.interpolate.griddata from unstructured source points to the target grid. The regridding calculation is performed in the given projection. @@ -689,8 +672,7 @@ def regridder(self, src_cube, target_grid): class ProjectedUnstructuredNearest: - """ - This class describes the nearest regridding scheme which uses the + """This class describes the nearest regridding scheme which uses the scipy.interpolate.griddata to regrid unstructured data on to a grid. 
The source cube and the target cube will be projected into a common @@ -705,8 +687,7 @@ class ProjectedUnstructuredNearest: """ def __init__(self, projection=None): - """ - Nearest regridding scheme that uses scipy.interpolate.griddata on + """Nearest regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. .. note:: @@ -738,8 +719,7 @@ def __init__(self, projection=None): warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): - """ - Creates a nearest-neighbour regridder to perform regridding, using + """Creates a nearest-neighbour regridder to perform regridding, using scipy.interpolate.griddata from unstructured source points to the target grid. The regridding calculation is performed in the given projection. diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index a06aba986e..45ac0505ed 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support for conservative regridding via ESMPy. +"""Support for conservative regridding via ESMPy. .. note:: @@ -41,8 +40,7 @@ def _convert_latlons(crs, x_array, y_array): - """ - Convert x+y coords in a given crs to (x,y) values in true-lat-lons. + """Convert x+y coords in a given crs to (x,y) values in true-lat-lons. .. note:: @@ -55,8 +53,7 @@ def _convert_latlons(crs, x_array, y_array): def _make_esmpy_field(x_coord, y_coord, ref_name="field", data=None, mask=None): - """ - Create an ESMPy ESMF.Field on given coordinates. + """Create an ESMPy ESMF.Field on given coordinates. Create a ESMF.Grid from the coordinates, defining corners and centre positions as lats+lons. 
@@ -146,8 +143,7 @@ def _make_esmpy_field(x_coord, y_coord, ref_name="field", data=None, mask=None): def regrid_conservative_via_esmpy(source_cube, grid_cube): - """ - Perform a conservative regridding with ESMPy. + """Perform a conservative regridding with ESMPy. .. note :: diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index bc6e02f4b8..9acd926eb2 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Definitions of how Iris objects should be represented. +"""Definitions of how Iris objects should be represented. """ @@ -13,8 +12,7 @@ class CubeRepresentation: - """ - Produce representations of a :class:`~iris.cube.Cube`. + """Produce representations of a :class:`~iris.cube.Cube`. This includes: @@ -120,8 +118,7 @@ def __init__(self, cube): self.units = escape(str(self.cube.units)) def _get_dim_names(self): - """ - Get dimension-describing coordinate names, or '--' if no coordinate] + """Get dimension-describing coordinate names, or '--' if no coordinate] describes the dimension. Note: borrows from `cube.summary`. @@ -151,8 +148,7 @@ def _get_lines(self): return self.cube_str.split("\n") def _get_bits(self, bits): - """ - Parse the body content (`bits`) of the cube string in preparation for + """Parse the body content (`bits`) of the cube string in preparation for being converted into table rows. """ @@ -181,8 +177,7 @@ def _get_bits(self, bits): self.sections_data[str_heading_name] = content def _make_header(self): - """ - Make the table header. This is similar to the summary of the cube, + """Make the table header. This is similar to the summary of the cube, but does not include dim shapes. These are included on the next table row down, and produced with `make_shapes_row`. 
@@ -206,8 +201,7 @@ def _make_shapes_row(self): return "\n".join(cell for cell in cells) def _make_row(self, title, body=None, col_span=0): - """ - Produce one row for the table body; i.e. + """Produce one row for the table body; i.e. Coord namex-... `body` contains the content for each cell not in the left-most (title) diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index d3ba6bfecb..d79102cc08 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Routines for putting data on new strata (aka. isosurfaces), often in the +"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction. """ @@ -18,8 +17,7 @@ def _copy_coords_without_z_dim(src, tgt, z_dim): - """ - Helper function to copy across non z-dimenson coordinates between cubes. + """Helper function to copy across non z-dimenson coordinates between cubes. Parameters ---------- @@ -54,8 +52,7 @@ def _copy_coords_without_z_dim(src, tgt, z_dim): def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): - """ - Interpolate the cube onto the specified target levels, given the + """Interpolate the cube onto the specified target levels, given the source levels of the cube. For example, suppose we have two datasets `P(i,j,k)` and `H(i,j,k)` diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py index 30a934dfba..58695912c6 100644 --- a/lib/iris/experimental/ugrid/__init__.py +++ b/lib/iris/experimental/ugrid/__init__.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Infra-structure for unstructured mesh support, based on +"""Infra-structure for unstructured mesh support, based on CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/ .. note:: diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index 959548870e..10e76cc11b 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Extensions to Iris' CF variable representation to represent CF UGrid variables. +"""Extensions to Iris' CF variable representation to represent CF UGrid variables. Eventual destination: :mod:`iris.fileformats.cf`. @@ -17,8 +16,7 @@ class CFUGridConnectivityVariable(cf.CFVariable): - """ - A CF_UGRID connectivity variable points to an index variable identifying + """A CF_UGRID connectivity variable points to an index variable identifying for every element (edge/face/volume) the indices of its corner nodes. The connectivity array will thus be a matrix of size n-elements x n-corners. For the indexing one may use either 0- or 1-based indexing; the convention @@ -88,8 +86,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridAuxiliaryCoordinateVariable(cf.CFVariable): - """ - A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary + """A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary coordinate variable representing the element (node/edge/face/volume) locations (latitude, longitude or other spatial coordinates, and optional elevation or other coordinates). 
These auxiliary coordinate variables will @@ -166,8 +163,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridMeshVariable(cf.CFVariable): - """ - A CF-UGRID mesh variable is a dummy variable for storing topology + """A CF-UGRID mesh variable is a dummy variable for storing topology information as attributes. The mesh variable has the ``cf_role`` 'mesh_topology'. @@ -234,8 +230,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridGroup(cf.CFGroup): - """ - Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata + """Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata Conventions' variables and netCDF global attributes. Specialisation of :class:`~iris.fileformats.cf.CFGroup` that includes extra @@ -260,8 +255,7 @@ def meshes(self): @property def non_data_variable_names(self): - """ - :class:`set` of the names of the CF-netCDF/CF-UGRID variables that are + """:class:`set` of the names of the CF-netCDF/CF-UGRID variables that are not the data pay-load. """ @@ -273,8 +267,7 @@ def non_data_variable_names(self): class CFUGridReader(cf.CFReader): - """ - This class allows the contents of a netCDF file to be interpreted according + """This class allows the contents of a netCDF file to be interpreted according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. Specialisation of :class:`~iris.fileformats.cf.CFReader` that can also diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 001ef0bb9b..317d64ca04 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Extensions to Iris' NetCDF loading to allow the construction of +"""Extensions to Iris' NetCDF loading to allow the construction of :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es from UGRID data in the file. Eventual destination: :mod:`iris.fileformats.netcdf`. @@ -49,8 +48,7 @@ class _WarnComboCfDefaultingIgnoring(_WarnComboCfDefaulting, IrisIgnoringWarning class ParseUGridOnLoad(threading.local): def __init__(self): - """ - A flag for dictating whether to use the experimental UGRID-aware + """A flag for dictating whether to use the experimental UGRID-aware version of Iris NetCDF loading. Object is thread-safe. Use via the run-time switch @@ -70,8 +68,7 @@ def __bool__(self): @contextmanager def context(self): - """ - Temporarily activate experimental UGRID-aware NetCDF loading. + """Temporarily activate experimental UGRID-aware NetCDF loading. Use the standard Iris loading API while within the context manager. If the loaded file(s) include any UGRID content, this will be parsed and @@ -100,8 +97,7 @@ def context(self): def _meshes_from_cf(cf_reader): - """ - Common behaviour for extracting meshes from a CFReader. + """Common behaviour for extracting meshes from a CFReader. Simple now, but expected to increase in complexity as Mesh sharing develops. @@ -118,8 +114,7 @@ def _meshes_from_cf(cf_reader): def load_mesh(uris, var_name=None): - """ - Load a single :class:`~iris.experimental.ugrid.mesh.Mesh` object from one or more NetCDF files. + """Load a single :class:`~iris.experimental.ugrid.mesh.Mesh` object from one or more NetCDF files. Raises an error if more/less than one :class:`~iris.experimental.ugrid.mesh.Mesh` is found. @@ -148,8 +143,7 @@ def load_mesh(uris, var_name=None): def load_meshes(uris, var_name=None): - """ - Load :class:`~iris.experimental.ugrid.mesh.Mesh` objects from one or more NetCDF files. + """Load :class:`~iris.experimental.ugrid.mesh.Mesh` objects from one or more NetCDF files. 
Parameters ---------- @@ -236,8 +230,7 @@ def load_meshes(uris, var_name=None): def _build_aux_coord(coord_var, file_path): - """ - Construct a :class:`~iris.coords.AuxCoord` from a given + """Construct a :class:`~iris.coords.AuxCoord` from a given :class:`~iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable`, and guess its mesh axis. @@ -290,8 +283,7 @@ def _build_aux_coord(coord_var, file_path): def _build_connectivity(connectivity_var, file_path, element_dims): - """ - Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a + """Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a given :class:`~iris.experimental.ugrid.cf.CFUGridConnectivityVariable`, and identify the name of its first dimension. @@ -333,8 +325,7 @@ def _build_connectivity(connectivity_var, file_path, element_dims): def _build_mesh(cf, mesh_var, file_path): - """ - Construct a :class:`~iris.experimental.ugrid.mesh.Mesh` from a given + """Construct a :class:`~iris.experimental.ugrid.mesh.Mesh` from a given :class:`~iris.experimental.ugrid.cf.CFUGridMeshVariable`. todo: integrate with standard loading API post-pyke. @@ -467,8 +458,7 @@ def _build_mesh(cf, mesh_var, file_path): def _build_mesh_coords(mesh, cf_var): - """ - Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` using + """Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` using from a given :class:`~iris.experimental.ugrid.mesh.Mesh` and :class:`~iris.fileformats.cf.CFVariable`. diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index cd5a440cd0..02b26f41f1 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Iris' data model representation of CF UGrid's Mesh and its constituent parts. 
+"""Iris' data model representation of CF UGrid's Mesh and its constituent parts. Eventual destination: dedicated module in :mod:`iris` root. @@ -85,8 +84,7 @@ class Connectivity(_DimensionalMetadata): - """ - A CF-UGRID topology connectivity, describing the topological relationship + """A CF-UGRID topology connectivity, describing the topological relationship between two types of mesh element. One or more connectivities make up a CF-UGRID topology - a constituent of a CF-UGRID mesh. @@ -119,8 +117,7 @@ def __init__( start_index=0, location_axis=0, ): - """ - Constructs a single connectivity. + """Constructs a single connectivity. Args: @@ -221,8 +218,7 @@ def _values(self, values): @property def cf_role(self): - """ - The category of topological relationship that this connectivity + """The category of topological relationship that this connectivity describes. **Read-only** - validity of :attr:`indices` is dependent on :attr:`cf_role`. A new :class:`Connectivity` must therefore be defined @@ -233,8 +229,7 @@ def cf_role(self): @property def location(self): - """ - Derived from the connectivity's :attr:`cf_role` - the first part, e.g. + """Derived from the connectivity's :attr:`cf_role` - the first part, e.g. ``face`` in ``face_node_connectivity``. Refers to the elements that vary along the :attr:`location_axis` of the connectivity's :attr:`indices` array. @@ -244,8 +239,7 @@ def location(self): @property def connected(self): - """ - Derived from the connectivity's :attr:`cf_role` - the second part, e.g. + """Derived from the connectivity's :attr:`cf_role` - the second part, e.g. ``node`` in ``face_node_connectivity``. Refers to the elements indexed by the values in the connectivity's :attr:`indices` array. @@ -254,8 +248,7 @@ def connected(self): @property def start_index(self): - """ - The base value of the connectivity's :attr:`indices` array; either + """The base value of the connectivity's :attr:`indices` array; either ``0`` or ``1``. 
**Read-only** - validity of :attr:`indices` is dependent on :attr:`start_index`. A new :class:`Connectivity` must therefore be @@ -266,8 +259,7 @@ def start_index(self): @property def location_axis(self): - """ - The axis of the connectivity's :attr:`indices` array that varies + """The axis of the connectivity's :attr:`indices` array that varies over the connectivity's :attr:`location` elements. Either ``0`` or ``1``. **Read-only** - validity of :attr:`indices` is dependent on :attr:`location_axis`. Use :meth:`transpose` to create a new, transposed @@ -278,8 +270,7 @@ def location_axis(self): @property def connected_axis(self): - """ - Derived as the alternate value of :attr:`location_axis` - each must + """Derived as the alternate value of :attr:`location_axis` - each must equal either ``0`` or ``1``. The axis of the connectivity's :attr:`indices` array that varies over the :attr:`connected` elements associated with each :attr:`location` element. @@ -289,8 +280,7 @@ def connected_axis(self): @property def indices(self): - """ - The index values describing the topological relationship of the + """The index values describing the topological relationship of the connectivity, as a NumPy array. Masked points indicate a :attr:`location` element with fewer :attr:`connected` elements than other :attr:`location` elements described in this array - unused index @@ -304,8 +294,7 @@ def indices(self): return self._values def indices_by_location(self, indices=None): - """ - Return a view of the indices array with :attr:`location_axis` **always** as + """Return a view of the indices array with :attr:`location_axis` **always** as the first axis - transposed if necessary. Can optionally pass in an identically shaped array on which to perform this operation (e.g. the output from :meth:`core_indices` or :meth:`lazy_indices`). 
@@ -402,8 +391,7 @@ def indices_error(message): ) def validate_indices(self): - """ - Perform a thorough validity check of this connectivity's + """Perform a thorough validity check of this connectivity's :attr:`indices`. Includes checking the number of :attr:`connected` elements associated with each :attr:`location` element (specified using masks on the :attr:`indices` array) against the :attr:`cf_role`. @@ -447,8 +435,7 @@ def __eq__(self, other): return eq def transpose(self): - """ - Create a new :class:`Connectivity`, identical to this one but with the + """Create a new :class:`Connectivity`, identical to this one but with the :attr:`indices` array transposed and the :attr:`location_axis` value flipped. Returns: @@ -470,8 +457,7 @@ def transpose(self): return new_connectivity def lazy_indices(self): - """ - Return a lazy array representing the connectivity's indices. + """Return a lazy array representing the connectivity's indices. Accessing this method will never cause the :attr:`indices` values to be loaded. Similarly, calling methods on, or indexing, the returned Array @@ -487,8 +473,7 @@ def lazy_indices(self): return super()._lazy_values() def core_indices(self): - """ - The indices array at the core of this connectivity, which may be a + """The indices array at the core of this connectivity, which may be a NumPy array or a Dask array. Returns: @@ -498,8 +483,7 @@ def core_indices(self): return super()._core_values() def has_lazy_indices(self): - """ - Return a boolean indicating whether the connectivity's :attr:`indices` + """Return a boolean indicating whether the connectivity's :attr:`indices` array is a lazy Dask array or not. 
Returns: @@ -509,8 +493,7 @@ def has_lazy_indices(self): return super()._has_lazy_values() def lazy_location_lengths(self): - """ - Return a lazy array representing the number of :attr:`connected` + """Return a lazy array representing the number of :attr:`connected` elements associated with each of the connectivity's :attr:`location` elements, accounting for masks if present. @@ -533,8 +516,7 @@ def lazy_location_lengths(self): return max_location_size - location_mask_counts def location_lengths(self): - """ - Return a NumPy array representing the number of :attr:`connected` + """Return a NumPy array representing the number of :attr:`connected` elements associated with each of the connectivity's :attr:`location` elements, accounting for masks if present. @@ -562,8 +544,7 @@ def xml_element(self, doc): class Mesh(CFVariableMixin): - """ - A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting + """A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting 1D network, 2D triangular, and 2D flexible mesh topologies. .. note:: @@ -601,7 +582,8 @@ def __init__( edge_dimension=None, face_dimension=None, ): - """ + """Mesh initialise. + .. note:: The purpose of the :attr:`node_dimension`, :attr:`edge_dimension` and @@ -677,8 +659,7 @@ def normalise(element, axis): @classmethod def from_coords(cls, *coords): - """ - Construct a :class:`Mesh` by derivation from one or more + """Construct a :class:`Mesh` by derivation from one or more :class:`~iris.coords.Coord`\\ s. The :attr:`~Mesh.topology_dimension`, :class:`~iris.coords.Coord` @@ -906,8 +887,7 @@ def __ne__(self, other): return result def summary(self, shorten=False): - """ - Return a string representation of the Mesh. + """Return a string representation of the Mesh. 
Parameters ---------- @@ -1076,8 +1056,7 @@ def _set_dimension_names(self, node, edge, face, reset=False): @property def all_connectivities(self): - """ - All the :class:`~iris.experimental.ugrid.mesh.Connectivity` instances + """All the :class:`~iris.experimental.ugrid.mesh.Connectivity` instances of the :class:`Mesh`. """ @@ -1085,16 +1064,12 @@ def all_connectivities(self): @property def all_coords(self): - """ - All the :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`. - - """ + """All the :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.all_members @property def boundary_node_connectivity(self): - """ - The *optional* UGRID ``boundary_node_connectivity`` + """The *optional* UGRID ``boundary_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1103,8 +1078,7 @@ def boundary_node_connectivity(self): @property def edge_coords(self): - """ - The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates + """The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`. """ @@ -1112,8 +1086,7 @@ def edge_coords(self): @property def edge_dimension(self): - """ - The *optionally required* UGRID NetCDF variable name for the ``edge`` + """The *optionally required* UGRID NetCDF variable name for the ``edge`` dimension. """ @@ -1129,8 +1102,7 @@ def edge_dimension(self, name): @property def edge_face_connectivity(self): - """ - The *optional* UGRID ``edge_face_connectivity`` + """The *optional* UGRID ``edge_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. 
@@ -1139,8 +1111,7 @@ def edge_face_connectivity(self): @property def edge_node_connectivity(self): - """ - The UGRID ``edge_node_connectivity`` + """The UGRID ``edge_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` of ``1``, and *optionally required* for @@ -1151,8 +1122,7 @@ def edge_node_connectivity(self): @property def face_coords(self): - """ - The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates + """The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`. """ @@ -1160,8 +1130,7 @@ def face_coords(self): @property def face_dimension(self): - """ - The *optionally required* UGRID NetCDF variable name for the ``face`` + """The *optionally required* UGRID NetCDF variable name for the ``face`` dimension. """ @@ -1186,8 +1155,7 @@ def face_dimension(self, name): @property def face_edge_connectivity(self): - """ - The *optional* UGRID ``face_edge_connectivity`` + """The *optional* UGRID ``face_edge_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1197,8 +1165,7 @@ def face_edge_connectivity(self): @property def face_face_connectivity(self): - """ - The *optional* UGRID ``face_face_connectivity`` + """The *optional* UGRID ``face_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. 
@@ -1207,8 +1174,7 @@ def face_face_connectivity(self): @property def face_node_connectivity(self): - """ - The UGRID ``face_node_connectivity`` + """The UGRID ``face_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` of ``2``, and *optionally required* for :attr:`Mesh.topology_dimension` @@ -1219,8 +1185,7 @@ def face_node_connectivity(self): @property def node_coords(self): - """ - The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates + """The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`. """ @@ -1240,8 +1205,7 @@ def node_dimension(self, name): self._metadata_manager.node_dimension = node_dimension def add_connectivities(self, *connectivities): - """ - Add one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` instances to the :class:`Mesh`. + """Add one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` instances to the :class:`Mesh`. Args: @@ -1262,8 +1226,7 @@ def add_coords( face_x=None, face_y=None, ): - """ - Add one or more :class:`~iris.coords.AuxCoord` coordinates to the :class:`Mesh`. + """Add one or more :class:`~iris.coords.AuxCoord` coordinates to the :class:`Mesh`. Kwargs: @@ -1312,8 +1275,7 @@ def connectivities( contains_edge=None, contains_face=None, ): - """ - Return all :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Return all :class:`~iris.experimental.ugrid.mesh.Connectivity` instances from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1407,8 +1369,7 @@ def connectivity( contains_edge=None, contains_face=None, ): - """ - Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that matches the provided criteria. 
Criteria can be either specific properties or other objects with @@ -1509,8 +1470,7 @@ def coord( include_edges=None, include_faces=None, ): - """ - Return a single :class:`~iris.coords.AuxCoord` coordinate from the + """Return a single :class:`~iris.coords.AuxCoord` coordinate from the :class:`Mesh` that matches the provided criteria. Criteria can be either specific properties or other objects with @@ -1599,8 +1559,7 @@ def coords( include_edges=None, include_faces=None, ): - """ - Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that + """Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1684,8 +1643,7 @@ def remove_connectivities( contains_edge=None, contains_face=None, ): - """ - Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1775,8 +1733,7 @@ def remove_coords( include_edges=None, include_faces=None, ): - """ - Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` + """Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1852,8 +1809,7 @@ def remove_coords( return self._coord_manager.remove(**kwargs) def xml_element(self, doc): - """ - Create the :class:`xml.dom.minidom.Element` that describes this + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`Mesh`. Args: @@ -1883,8 +1839,7 @@ def xml_element(self, doc): # # return the lazy AuxCoord(...), AuxCoord(...) 
def to_MeshCoord(self, location, axis): - """ - Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord` that + """Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord` that references the current :class:`Mesh`, and passing through the ``location`` and ``axis`` arguments. @@ -1910,8 +1865,7 @@ def to_MeshCoord(self, location, axis): return MeshCoord(mesh=self, location=location, axis=axis) def to_MeshCoords(self, location): - """ - Generate a tuple of + """Generate a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`\\ s, each referencing the current :class:`Mesh`, one for each :attr:`AXES` value, passing through the ``location`` argument. @@ -1936,8 +1890,7 @@ def to_MeshCoords(self, location): return tuple(result) def dimension_names_reset(self, node=False, edge=False, face=False): - """ - Reset the name used for the NetCDF variable representing the ``node``, + """Reset the name used for the NetCDF variable representing the ``node``, ``edge`` and/or ``face`` dimension to ``None``. Kwargs: @@ -1958,8 +1911,7 @@ def dimension_names_reset(self, node=False, edge=False, face=False): return self._set_dimension_names(node, edge, face, reset=True) def dimension_names(self, node=None, edge=None, face=None): - """ - Assign the name to be used for the NetCDF variable representing + """Assign the name to be used for the NetCDF variable representing the ``node``, ``edge`` and ``face`` dimension. The default value of ``None`` will not be assigned to clear the @@ -1990,8 +1942,7 @@ def cf_role(self): @property def topology_dimension(self): - """ - The UGRID ``topology_dimension`` attribute represents the highest + """The UGRID ``topology_dimension`` attribute represents the highest dimensionality of all the geometric elements (node, edge, face) represented within the :class:`Mesh`. 
@@ -2000,9 +1951,7 @@ def topology_dimension(self): class _Mesh1DCoordinateManager: - """ - - TBD: require clarity on coord_systems validation + """TBD: require clarity on coord_systems validation TBD: require clarity on __eq__ support TBD: rationalise self.coords() logic with other manager and Cube @@ -2191,8 +2140,7 @@ def _add(self, coords): setattr(self, member_y, coords[1]) def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): - """ - use self.remove(edge_x=True) to remove a coordinate e.g., using the + """use self.remove(edge_x=True) to remove a coordinate e.g., using the pattern self.add(edge_x=None) will not remove the edge_x coordinate """ @@ -2682,8 +2630,7 @@ def face_node(self): class MeshCoord(AuxCoord): - """ - Geographic coordinate values of data on an unstructured mesh. + """Geographic coordinate values of data on an unstructured mesh. A MeshCoord references a `~iris.experimental.ugrid.mesh.Mesh`. When contained in a `~iris.cube.Cube` it connects the cube to the Mesh. @@ -2878,8 +2825,7 @@ def __getitem__(self, keys): return self.copy() def copy(self, points=None, bounds=None): - """ - Make a copy of the MeshCoord. + """Make a copy of the MeshCoord. Kwargs: @@ -2904,8 +2850,7 @@ def copy(self, points=None, bounds=None): return new_coord def __deepcopy__(self, memo): - """ - Make this equivalent to "shallow" copy, returning a new MeshCoord based + """Make this equivalent to "shallow" copy, returning a new MeshCoord based on the same Mesh. Required to prevent cube copying from copying the Mesh, which would @@ -2991,8 +2936,7 @@ def summary(self, *args, **kwargs): return result def _construct_access_arrays(self): - """ - Build lazy points and bounds arrays, providing dynamic access via the + """Build lazy points and bounds arrays, providing dynamic access via the Mesh, according to the location and axis. 
Returns: diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index 231803fd74..153f71bfcb 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -The common metadata API classes for :mod:`iris.experimental.ugrid.mesh`. +"""The common metadata API classes for :mod:`iris.experimental.ugrid.mesh`. Eventual destination: :mod:`iris.common.metadata`. @@ -22,10 +21,7 @@ class ConnectivityMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.experimental.ugrid.mesh.Connectivity`. - - """ + """Metadata container for a :class:`~iris.experimental.ugrid.mesh.Connectivity`.""" # The "location_axis" member is stateful only, and does not participate in # lenient/strict equivalence. @@ -39,8 +35,7 @@ def __eq__(self, other): return super().__eq__(other) def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for connectivities. + """Perform lenient combination of metadata members for connectivities. Args: @@ -68,8 +63,7 @@ def func(field): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for connectivities. + """Perform lenient equality of metadata members for connectivities. Args: @@ -97,8 +91,7 @@ def _compare_lenient(self, other): return result def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for connectivities. + """Perform lenient difference of metadata members for connectivities. Args: @@ -142,10 +135,7 @@ def equal(self, other, lenient=None): class MeshMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.experimental.ugrid.mesh.Mesh`. 
- - """ + """Metadata container for a :class:`~iris.experimental.ugrid.mesh.Mesh`.""" # The node_dimension", "edge_dimension" and "face_dimension" members are # stateful only; they not participate in lenient/strict equivalence. @@ -164,8 +154,7 @@ def __eq__(self, other): return super().__eq__(other) def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for meshes. + """Perform lenient combination of metadata members for meshes. Args: @@ -194,8 +183,7 @@ def func(field): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for meshes. + """Perform lenient equality of metadata members for meshes. Args: @@ -218,8 +206,7 @@ def _compare_lenient(self, other): return result def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for meshes. + """Perform lenient difference of metadata members for meshes. Args: @@ -264,9 +251,7 @@ def equal(self, other, lenient=None): class MeshCoordMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.coords.MeshCoord`. - """ + """Metadata container for a :class:`~iris.coords.MeshCoord`.""" _members = ("location", "axis") # NOTE: in future, we may add 'mesh' as part of this metadata, @@ -284,8 +269,7 @@ def __eq__(self, other): return super().__eq__(other) def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for MeshCoord. + """Perform lenient combination of metadata members for MeshCoord. Args: @@ -312,8 +296,7 @@ def func(field): return result def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for MeshCoord. + """Perform lenient equality of metadata members for MeshCoord. Args: @@ -336,8 +319,7 @@ def _compare_lenient(self, other): return result def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for MeshCoord. + """Perform lenient difference of metadata members for MeshCoord. 
Args: diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index d10a967014..00891b3044 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Extensions to Iris' NetCDF saving to allow +"""Extensions to Iris' NetCDF saving to allow :class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format. Eventual destination: :mod:`iris.fileformats.netcdf`. @@ -16,8 +15,7 @@ def save_mesh(mesh, filename, netcdf_format="NETCDF4"): - """ - Save mesh(es) to a netCDF file. + """Save mesh(es) to a netCDF file. Args: diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index fae14687aa..fc0464077a 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Utility operations specific to unstructured data. +"""Utility operations specific to unstructured data. """ from typing import AnyStr, Iterable, Union @@ -20,8 +19,7 @@ def recombine_submeshes( submesh_cubes: Union[Iterable[Cube], Cube], index_coord_name: AnyStr = "i_mesh_index", ) -> Cube: - """ - Put data from sub-meshes back onto the original full mesh. + """Put data from sub-meshes back onto the original full mesh. The result is a cube like ``mesh_cube``, but with its data replaced by a combination of the data in the ``submesh_cubes``. diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index 0854b46151..cd967881c7 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -""" -A package for converting cubes to and from specific file formats. +"""A package for converting cubes to and from specific file formats. """ diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 5c32c23757..88bbfd45d5 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides UK Met Office Fields File (FF) format specific capabilities. +"""Provides UK Met Office Fields File (FF) format specific capabilities. """ @@ -128,8 +127,7 @@ class _WarnComboLoadingDefaulting(IrisDefaultingWarning, IrisLoadWarning): class Grid: - """ - An abstract class representing the default/file-level grid + """An abstract class representing the default/file-level grid definition for a FieldsFile. """ @@ -141,8 +139,7 @@ def __init__( real_constants, horiz_grid_type, ): - """ - Create a Grid from the relevant sections of the FFHeader. + """Create a Grid from the relevant sections of the FFHeader. Args: @@ -186,8 +183,7 @@ def regular_y(self, subgrid): raise NotImplementedError() def vectors(self, subgrid): - """ - Return the X and Y coordinate vectors for the given sub-grid of + """Return the X and Y coordinate vectors for the given sub-grid of this grid. Args: @@ -211,10 +207,7 @@ def vectors(self, subgrid): class ArakawaC(Grid): - """ - An abstract class representing an Arakawa C-grid. - - """ + """An abstract class representing an Arakawa C-grid.""" def _x_vectors(self): x_p, x_u = None, None @@ -229,8 +222,7 @@ def _x_vectors(self): return x_p, x_u def regular_x(self, subgrid): - """ - Return the "zeroth" value and step for the X coordinate on the + """Return the "zeroth" value and step for the X coordinate on the given sub-grid of this grid. 
Args: @@ -249,8 +241,7 @@ def regular_x(self, subgrid): return bzx, bdx def regular_y(self, subgrid): - """ - Return the "zeroth" value and step for the Y coordinate on the + """Return the "zeroth" value and step for the Y coordinate on the given sub-grid of this grid. Args: @@ -270,8 +261,7 @@ def regular_y(self, subgrid): class NewDynamics(ArakawaC): - """ - An Arakawa C-grid as used by UM New Dynamics. + """An Arakawa C-grid as used by UM New Dynamics. The theta and u points are at the poles. @@ -289,8 +279,7 @@ def _y_vectors(self): class ENDGame(ArakawaC): - """ - An Arakawa C-grid as used by UM ENDGame. + """An Arakawa C-grid as used by UM ENDGame. The v points are at the poles. @@ -308,16 +297,12 @@ def _y_vectors(self): class FFHeader: - """ - A class to represent the FIXED_LENGTH_HEADER section of a FieldsFile. - - """ + """A class to represent the FIXED_LENGTH_HEADER section of a FieldsFile.""" GRID_STAGGERING_CLASS = {3: NewDynamics, 6: ENDGame} def __init__(self, filename, word_depth=DEFAULT_FF_WORD_DEPTH): - """ - Create a FieldsFile header instance by reading the + """Create a FieldsFile header instance by reading the FIXED_LENGTH_HEADER section of the FieldsFile, making the names defined in FF_HEADER available as attributes of a FFHeader instance. @@ -412,8 +397,7 @@ def _attribute_is_pointer_and_needs_addressing(self, name): return is_referenceable def shape(self, name): - """ - Return the dimension shape of the FieldsFile FIXED_LENGTH_HEADER + """Return the dimension shape of the FieldsFile FIXED_LENGTH_HEADER pointer attribute. Args: @@ -453,14 +437,10 @@ def grid(self): class FF2PP: - """ - A class to extract the individual PPFields from within a FieldsFile. 
- - """ + """A class to extract the individual PPFields from within a FieldsFile.""" def __init__(self, filename, read_data=False, word_depth=DEFAULT_FF_WORD_DEPTH): - """ - Create a FieldsFile to Post Process instance that returns a generator + """Create a FieldsFile to Post Process instance that returns a generator of PPFields contained within the FieldsFile. Args: @@ -576,10 +556,7 @@ def range_order(range1, range2, resolution): return field_dim def _adjust_field_for_lbc(self, field): - """ - Make an LBC field look like a 'normal' field for rules processing. - - """ + """Make an LBC field look like a 'normal' field for rules processing.""" # Set LBTIM to indicate the specific time encoding for LBCs, # i.e. t1=forecast, t2=reference lbtim_default = 11 @@ -643,8 +620,7 @@ def _adjust_field_for_lbc(self, field): field.bzy -= field.bdy * boundary_packing.y_halo def _fields_over_all_levels(self, field): - """ - Replicate the field over all model levels, setting LBLEV for each. + """Replicate the field over all model levels, setting LBLEV for each. This is appropriate for LBC data. Yields an iterator producing a sequence of distinct field objects. @@ -831,8 +807,7 @@ def __iter__(self): def _parse_binary_stream(file_like, dtype=np.float64, count=-1): - """ - Replacement :func:`numpy.fromfile` due to python3 performance issues. + """Replacement :func:`numpy.fromfile` due to python3 performance issues. Args: @@ -865,8 +840,7 @@ def _parse_binary_stream(file_like, dtype=np.float64, count=-1): def load_cubes(filenames, callback, constraints=None): - """ - Loads cubes from a list of fields files filenames. + """Loads cubes from a list of fields files filenames. Args: @@ -890,8 +864,7 @@ def load_cubes(filenames, callback, constraints=None): def load_cubes_32bit_ieee(filenames, callback, constraints=None): - """ - Loads cubes from a list of 32bit ieee converted fieldsfiles filenames. + """Loads cubes from a list of 32bit ieee converted fieldsfiles filenames. .. 
seealso:: diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index ca2f341249..9d8c46c777 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support for cube-specific CF-to-Iris translation operations. +"""Support for cube-specific CF-to-Iris translation operations. Interprets CF concepts identified by :mod:`iris.fileformats.cf` to add components into loaded cubes. diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d0fdd0e273..92d46c6693 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Replacement code for the Pyke rules. +"""Replacement code for the Pyke rules. For now, we are still emulating various aspects of how our original Pyke-based code used the Pyke 'engine' to hold translation data, both Pyke-specific and @@ -537,8 +536,7 @@ def action_formula_term(engine, formula_term_fact): def run_actions(engine): - """ - Run all actions for a cube. + """Run all actions for a cube. This is the top-level "activation" function which runs all the appropriate rules actions to translate facts and build all the cube elements. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 7be5f9ed63..20527fdee4 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -""" -A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines +"""A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. This allows us to replace the Pyke rules operation with the simpler pure-Python @@ -21,8 +20,7 @@ class FactEntity: - """ - An object with an 'entity_lists' property which is a dict of 'FactList's. + """An object with an 'entity_lists' property which is a dict of 'FactList's. A Factlist, in turn, is an object with property 'case_specific_facts', which is a list of tuples of strings @@ -61,8 +59,7 @@ def sect_facts(self, fact_name): class Engine: - """ - A minimal mimic of a Pyke.engine. + """A minimal mimic of a Pyke.engine. Provides just enough API so that the existing code in :mod:`iris.fileformats.netcdf` can interface with our new rules functions. @@ -83,8 +80,7 @@ def reset(self): self.facts = FactEntity() def activate(self): - """ - Run all the translation rules to produce a single output cube. + """Run all the translation rules to produce a single output cube. This implicitly references the output variable for this operation, set by engine.cf_var (a CFDataVariable). @@ -96,8 +92,7 @@ def activate(self): run_actions(self) def get_kb(self): - """ - Get a FactEntity, which mimic (bits of) a knowledge-base. + """Get a FactEntity, which mimic (bits of) a knowledge-base. Just allowing :meth:`iris.fileformats.netcdf._action_activation_stats` to list the @@ -107,16 +102,14 @@ def get_kb(self): return self.facts def print_stats(self): - """ - No-op, called by + """No-op, called by :meth:`iris.fileformats.netcdf._action_activation_stats`. """ pass def add_case_specific_fact(self, fact_name, fact_arglist): - """ - Record a fact about the current output operation. + """Record a fact about the current output operation. 
Roughly, facts = self.facts.entity_lists[fact_name].case_specific_facts @@ -126,8 +119,7 @@ def add_case_specific_fact(self, fact_name, fact_arglist): self.facts.add_fact(fact_name, fact_arglist) def fact_list(self, fact_name): - """ - Return the facts (arg-lists) for one fact name. + """Return the facts (arg-lists) for one fact name. A shorthand form used only by the new 'actions' routines. @@ -138,8 +130,7 @@ def fact_list(self, fact_name): return self.facts.sect_facts(fact_name) def add_fact(self, fact_name, fact_arglist): - """ - Add a new fact. + """Add a new fact. A shorthand form used only by the new 'actions' routines. diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 1b75594c2a..288161deb6 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -All the pure-Python 'helper' functions which were previously included in the +"""All the pure-Python 'helper' functions which were previously included in the Pyke rules database 'fc_rules_cf.krb'. The 'action' routines now call these, as the rules used to do. @@ -259,8 +258,7 @@ class _WarnComboIgnoringCfLoad( def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: - """ - Split a CF cell_methods attribute string into a list of zero or more cell + """Split a CF cell_methods attribute string into a list of zero or more cell methods, each of which is then parsed with a regex to return a list of match objects. @@ -324,17 +322,14 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: class UnknownCellMethodWarning(iris.exceptions.IrisUnknownCellMethodWarning): - """ - Backwards compatible form of :class:`iris.exceptions.IrisUnknownCellMethodWarning`. 
- """ + """Backwards compatible form of :class:`iris.exceptions.IrisUnknownCellMethodWarning`.""" # TODO: remove at the next major release. pass def parse_cell_methods(nc_cell_methods): - """ - Parse a CF cell_methods attribute string into a tuple of zero or + """Parse a CF cell_methods attribute string into a tuple of zero or more CellMethod instances. Args: @@ -485,8 +480,7 @@ def build_cube_metadata(engine): ################################################################################ def _get_ellipsoid(cf_grid_var): - """ - Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of + """Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of `cf_grid_var`. Returns None if no relevant properties are specified. """ major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) @@ -569,8 +563,7 @@ def build_rotated_coordinate_system(engine, cf_grid_var): ################################################################################ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): - """ - Create a transverse Mercator coordinate system from the CF-netCDF + """Create a transverse Mercator coordinate system from the CF-netCDF grid mapping variable. """ @@ -613,8 +606,7 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): ################################################################################ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): - """ - Create a Lambert conformal conic coordinate system from the CF-netCDF + """Create a Lambert conformal conic coordinate system from the CF-netCDF grid mapping variable. 
""" @@ -644,8 +636,7 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): ################################################################################ def build_stereographic_coordinate_system(engine, cf_grid_var): - """ - Create a stereographic coordinate system from the CF-netCDF + """Create a stereographic coordinate system from the CF-netCDF grid mapping variable. """ @@ -679,8 +670,7 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): ################################################################################ def build_polar_stereographic_coordinate_system(engine, cf_grid_var): - """ - Create a polar stereographic coordinate system from the CF-netCDF + """Create a polar stereographic coordinate system from the CF-netCDF grid mapping variable. """ @@ -715,8 +705,7 @@ def build_polar_stereographic_coordinate_system(engine, cf_grid_var): ################################################################################ def build_mercator_coordinate_system(engine, cf_grid_var): - """ - Create a Mercator coordinate system from the CF-netCDF + """Create a Mercator coordinate system from the CF-netCDF grid mapping variable. """ @@ -746,8 +735,7 @@ def build_mercator_coordinate_system(engine, cf_grid_var): ################################################################################ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): - """ - Create a lambert azimuthal equal area coordinate system from the CF-netCDF + """Create a lambert azimuthal equal area coordinate system from the CF-netCDF grid mapping variable. 
""" @@ -775,8 +763,7 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): ################################################################################ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): - """ - Create a albers conical equal area coordinate system from the CF-netCDF + """Create a albers conical equal area coordinate system from the CF-netCDF grid mapping variable. """ @@ -806,8 +793,7 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): ################################################################################ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): - """ - Create a vertical perspective coordinate system from the CF-netCDF + """Create a vertical perspective coordinate system from the CF-netCDF grid mapping variable. """ @@ -839,8 +825,7 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): ################################################################################ def build_geostationary_coordinate_system(engine, cf_grid_var): - """ - Create a geostationary coordinate system from the CF-netCDF + """Create a geostationary coordinate system from the CF-netCDF grid mapping variable. """ @@ -874,8 +859,7 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): ################################################################################ def build_oblique_mercator_coordinate_system(engine, cf_grid_var): - """ - Create an oblique mercator coordinate system from the CF-netCDF + """Create an oblique mercator coordinate system from the CF-netCDF grid mapping variable. """ @@ -1005,8 +989,7 @@ def get_names(cf_coord_var, coord_name, attributes): ################################################################################ def get_cf_bounds_var(cf_coord_var): - """ - Return the CF variable representing the bounds of a coordinate + """Return the CF variable representing the bounds of a coordinate variable. 
""" @@ -1041,8 +1024,7 @@ def get_cf_bounds_var(cf_coord_var): ################################################################################ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): - """ - Return a bounds_data array with the vertex dimension as the most + """Return a bounds_data array with the vertex dimension as the most rapidly varying. .. note:: @@ -1379,8 +1361,7 @@ def build_ancil_var(engine, cf_av_var): ################################################################################ def _is_lat_lon(cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes): - """ - Determine whether the CF coordinate variable is a latitude/longitude variable. + """Determine whether the CF coordinate variable is a latitude/longitude variable. Ref: [CF] Section 4.1 Latitude Coordinate. [CF] Section 4.2 Longitude Coordinate. @@ -1454,8 +1435,7 @@ def is_longitude(engine, cf_name): ################################################################################ def is_projection_x_coordinate(engine, cf_name): - """ - Determine whether the CF coordinate variable is a + """Determine whether the CF coordinate variable is a projection_x_coordinate variable. """ @@ -1468,8 +1448,7 @@ def is_projection_x_coordinate(engine, cf_name): ################################################################################ def is_projection_y_coordinate(engine, cf_name): - """ - Determine whether the CF coordinate variable is a + """Determine whether the CF coordinate variable is a projection_y_coordinate variable. """ @@ -1482,8 +1461,7 @@ def is_projection_y_coordinate(engine, cf_name): ################################################################################ def is_time(engine, cf_name): - """ - Determine whether the CF coordinate variable is a time variable. + """Determine whether the CF coordinate variable is a time variable. Ref: [CF] Section 4.4 Time Coordinate. 
diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index ca7638a052..e9f8d36324 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -r""" -The purpose of this module is to provide utilities for the identification +r"""The purpose of this module is to provide utilities for the identification of multi-dimensional structure in a flat sequence of homogeneous objects. One application of this is to efficiently identify a higher dimensional structure from a sorted sequence of PPField instances; for an example, given @@ -48,16 +47,14 @@ class _UnstructuredArrayException(Exception): - """ - Raised when an array has been incorrectly assumed to be + """Raised when an array has been incorrectly assumed to be structured in a specific way. """ class ArrayStructure(namedtuple("ArrayStructure", ["stride", "unique_ordered_values"])): - """ - Represents the identified structure of an array, where stride is the + """Represents the identified structure of an array, where stride is the step between each unique value being seen in order in the flattened version of the array. @@ -95,8 +92,7 @@ def __new__(cls, stride, unique_ordered_values): @property def size(self): - """ - The ``size`` attribute is the number of the unique values in the + """The ``size`` attribute is the number of the unique values in the original array. It is **not** the length of the original array. 
""" @@ -118,8 +114,7 @@ def __ne__(self, other): return not (self == other) def construct_array(self, size): - """ - The inverse operation of :func:`ArrayStructure.from_array`, returning + """The inverse operation of :func:`ArrayStructure.from_array`, returning a 1D array of the given length with the appropriate repetition pattern. @@ -130,8 +125,7 @@ def construct_array(self, size): ) def nd_array_and_dims(self, original_array, target_shape, order="c"): - """ - Given a 1D array, and a target shape, construct an ndarray + """Given a 1D array, and a target shape, construct an ndarray and associated dimensions. Raises an _UnstructuredArrayException if no optimised shape array can @@ -205,8 +199,7 @@ def nd_array_and_dims(self, original_array, target_shape, order="c"): @classmethod def from_array(cls, arr): - """ - Return the computed ArrayStructure for the given flat array + """Return the computed ArrayStructure for the given flat array (if a structure exists, otherwise return None). """ @@ -291,17 +284,14 @@ def from_array(cls, arr): class GroupStructure: - """ - The GroupStructure class represents a collection of array structures along + """The GroupStructure class represents a collection of array structures along with additional information such as the length of the arrays and the array order in which they are found (row-major or column-major). """ def __init__(self, length, component_structure, array_order="c"): - """ - group_component_to_array - a dictionary. See also TODO - """ + """group_component_to_array - a dictionary. See also TODO""" #: The size common to all of the original arrays and used to determine #: possible shape configurations. 
self.length = length @@ -317,8 +307,7 @@ def __init__(self, length, component_structure, array_order="c"): @classmethod def from_component_arrays(cls, component_arrays, array_order="c"): - """ - Given a dictionary of component name to flattened numpy array, + """Given a dictionary of component name to flattened numpy array, return an :class:`GroupStructure` instance which is representative of the underlying array structures. @@ -340,8 +329,7 @@ def from_component_arrays(cls, component_arrays, array_order="c"): return cls(sizes[0], cmpt_structure, array_order=array_order) def _potentially_flattened_components(self): - """ - Return a generator of the components which could form non-trivial + """Return a generator of the components which could form non-trivial (i.e. ``length > 1``) array dimensions. """ @@ -354,8 +342,7 @@ def is_row_major(self): return self._array_order == "c" def possible_structures(self): - """ - Return a tuple containing the possible structures that this group + """Return a tuple containing the possible structures that this group could have. A structure in this case is an iterable of @@ -456,8 +443,7 @@ def __str__(self): return "\n".join(result) def build_arrays(self, shape, elements_arrays): - """ - Given the target shape, and a dictionary mapping name to 1D array of + """Given the target shape, and a dictionary mapping name to 1D array of :attr:`.length`, return a dictionary mapping element name to ``(ndarray, dims)``. diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 677945dac3..3f7a6b18e0 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides ABF (and ABL) file format capabilities. +"""Provides ABF (and ABL) file format capabilities. ABF and ABL files are satellite file formats defined by Boston University. 
Including this module adds ABF and ABL loading to the session's capabilities. @@ -57,16 +56,14 @@ class ABFField: - """ - A data field from an ABF (or ABL) file. + """A data field from an ABF (or ABL) file. Capable of creating a :class:`~iris.cube.Cube`. """ def __init__(self, filename): - """ - Create an ABFField object from the given filename. + """Create an ABFField object from the given filename. Args: @@ -196,8 +193,7 @@ def to_cube(self): def load_cubes(filespecs, callback=None): - """ - Loads cubes from a list of ABF filenames. + """Loads cubes from a list of ABF filenames. Args: diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 1d0fb5e6af..0acc03967b 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides the capability to load netCDF files and interpret them +"""Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. References: @@ -112,8 +111,7 @@ def _identify_common(variables, ignore, target): @abstractmethod def identify(self, variables, ignore=None, target=None, warn=True): - """ - Identify all variables that match the criterion for this CF-netCDF variable class. + """Identify all variables that match the criterion for this CF-netCDF variable class. Args: @@ -136,8 +134,7 @@ def identify(self, variables, ignore=None, target=None, warn=True): pass def spans(self, cf_variable): - """ - Determine whether the dimensionality of this variable + """Determine whether the dimensionality of this variable is a subset of the specified target variable. 
Note that, by default scalar variables always span the @@ -217,8 +214,7 @@ def cf_attrs_reset(self): self._cf_attrs = set([item[0] for item in self.cf_attrs_ignored()]) def add_formula_term(self, root, term): - """ - Register the participation of this CF-netCDF variable in a CF-netCDF formula term. + """Register the participation of this CF-netCDF variable in a CF-netCDF formula term. Args: @@ -234,8 +230,7 @@ def add_formula_term(self, root, term): self.cf_terms_by_root[root] = term def has_formula_terms(self): - """ - Determine whether this CF-netCDF variable participates in a CF-netcdf formula term. + """Determine whether this CF-netCDF variable participates in a CF-netcdf formula term. Returns: Boolean. @@ -245,8 +240,7 @@ def has_formula_terms(self): class CFAncillaryDataVariable(CFVariable): - """ - A CF-netCDF ancillary data variable is a variable that provides metadata + """A CF-netCDF ancillary data variable is a variable that provides metadata about the individual values of another data variable. Identified by the CF-netCDF variable attribute 'ancillary_variables'. @@ -286,8 +280,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFAuxiliaryCoordinateVariable(CFVariable): - """ - A CF-netCDF auxiliary coordinate variable is any netCDF variable that contains + """A CF-netCDF auxiliary coordinate variable is any netCDF variable that contains coordinate data, but is not a CF-netCDF coordinate variable by definition. There is no relationship between the name of a CF-netCDF auxiliary coordinate @@ -334,8 +327,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFBoundaryVariable(CFVariable): - """ - A CF-netCDF boundary variable is associated with a CF-netCDF variable that contains + """A CF-netCDF boundary variable is associated with a CF-netCDF variable that contains coordinate data. 
When a data value provides information about conditions in a cell occupying a region of space/time or some other dimension, the boundary variable provides a description of cell extent. @@ -378,8 +370,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def spans(self, cf_variable): - """ - Determine whether the dimensionality of this variable + """Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the @@ -407,8 +398,7 @@ def spans(self, cf_variable): class CFClimatologyVariable(CFVariable): - """ - A CF-netCDF climatology variable is associated with a CF-netCDF variable that contains + """A CF-netCDF climatology variable is associated with a CF-netCDF variable that contains coordinate data. When a data value provides information about conditions in a cell occupying a region of space/time or some other dimension, the climatology variable provides a climatological description of cell extent. @@ -451,8 +441,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def spans(self, cf_variable): - """ - Determine whether the dimensionality of this variable + """Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the @@ -480,8 +469,7 @@ def spans(self, cf_variable): class CFCoordinateVariable(CFVariable): - """ - A CF-netCDF coordinate variable is a one-dimensional variable with the same name + """A CF-netCDF coordinate variable is a one-dimensional variable with the same name as its dimension, and it is defined as a numeric data type with values that are ordered monotonically. Missing values are not allowed in CF-netCDF coordinate variables. Also see [NUG] Section 2.3.1. 
@@ -527,10 +515,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True, monotonic=Fals class CFDataVariable(CFVariable): - """ - A CF-netCDF variable containing data pay-load that maps to an Iris :class:`iris.cube.Cube`. - - """ + """A CF-netCDF variable containing data pay-load that maps to an Iris :class:`iris.cube.Cube`.""" @classmethod def identify(cls, variables, ignore=None, target=None, warn=True): @@ -538,8 +523,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class _CFFormulaTermsVariable(CFVariable): - """ - A CF-netCDF formula terms variable corresponds to a term in a formula that + """A CF-netCDF formula terms variable corresponds to a term in a formula that allows dimensional vertical coordinate values to be computed from dimensionless vertical coordinate values and associated variables at specific grid points. @@ -607,8 +591,7 @@ def __repr__(self): class CFGridMappingVariable(CFVariable): - """ - A CF-netCDF grid mapping variable contains a list of specific attributes that + """A CF-netCDF grid mapping variable contains a list of specific attributes that define a particular grid mapping. A CF-netCDF grid mapping variable must contain the attribute 'grid_mapping_name'. @@ -653,8 +636,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFLabelVariable(CFVariable): - """ - A CF-netCDF CF label variable is any netCDF variable that contain string + """A CF-netCDF CF label variable is any netCDF variable that contain string textual information, or labels. Identified by the CF-netCDF variable attribute 'coordinates'. @@ -695,8 +677,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def cf_label_data(self, cf_data_var): - """ - Return the associated CF-netCDF label variable strings. + """Return the associated CF-netCDF label variable strings. 
Args: @@ -762,8 +743,7 @@ def cf_label_data(self, cf_data_var): return data def cf_label_dimensions(self, cf_data_var): - """ - Return the name of the associated CF-netCDF label variable data dimensions. + """Return the name of the associated CF-netCDF label variable data dimensions. Args: @@ -790,8 +770,7 @@ def cf_label_dimensions(self, cf_data_var): ) def spans(self, cf_variable): - """ - Determine whether the dimensionality of this variable + """Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the @@ -819,8 +798,7 @@ def spans(self, cf_variable): class CFMeasureVariable(CFVariable): - """ - A CF-netCDF measure variable is a variable that contains cell areas or volumes. + """A CF-netCDF measure variable is a variable that contains cell areas or volumes. Identified by the CF-netCDF variable attribute 'cell_measures'. @@ -872,8 +850,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): ################################################################################ class CFGroup(MutableMapping): - """ - Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata + """Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata Conventions' variables and netCDF global attributes. """ @@ -950,8 +927,7 @@ def cell_measures(self): @property def non_data_variable_names(self): - """ - :class:`set` of the names of the CF-netCDF variables that are not + """:class:`set` of the names of the CF-netCDF variables that are not the data pay-load. """ @@ -1021,8 +997,7 @@ def __repr__(self): ################################################################################ class CFReader: - """ - This class allows the contents of a netCDF file to be interpreted according + """This class allows the contents of a netCDF file to be interpreted according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. 
""" @@ -1319,8 +1294,7 @@ def __del__(self): def _getncattr(dataset, attr, default=None): - """ - Simple wrapper round `netCDF4.Dataset.getncattr` to make it behave + """Simple wrapper round `netCDF4.Dataset.getncattr` to make it behave more like `getattr`. """ diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 04fd96ee38..6175dd553f 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. +"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. """ @@ -56,8 +55,7 @@ def _dot_path(): def save(cube, target): - """ - Save a dot representation of the cube. + """Save a dot representation of the cube. Args ---- @@ -87,8 +85,7 @@ def save(cube, target): def save_png(source, target, launch=False): - """ - Produce a "dot" instance diagram by calling dot and optionally launching + """Produce a "dot" instance diagram by calling dot and optionally launching the resulting image. Args @@ -151,8 +148,7 @@ def save_png(source, target, launch=False): def cube_text(cube): - """ - Return a DOT text representation a `iris.cube.Cube`. + """Return a DOT text representation a `iris.cube.Cube`. Args ---- @@ -282,8 +278,7 @@ def cube_text(cube): def _coord_text(label, coord): - """ - Return a string containing the dot representation for a single coordinate + """Return a string containing the dot representation for a single coordinate node. Args @@ -313,8 +308,7 @@ def _coord_text(label, coord): def _coord_system_text(cs, uid): - """ - Return a string containing the dot representation for a single coordinate + """Return a string containing the dot representation for a single coordinate system node. 
Args @@ -341,8 +335,7 @@ def _coord_system_text(cs, uid): def _dot_node(indent, id, name, attributes): - """ - Return a string containing the dot representation for a single node. + """Return a string containing the dot representation for a single node. Args ---- diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index 8b7135bce8..4742ec9001 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -6,8 +6,7 @@ def _get_NAME_loader(filename): - """ - Return the appropriate load function for a NAME file based + """Return the appropriate load function for a NAME file based on the contents of its header. """ @@ -44,8 +43,7 @@ def _get_NAME_loader(filename): def load_cubes(filenames, callback): - """ - Return a generator of cubes given one or more filenames and an + """Return a generator of cubes given one or more filenames and an optional callback. Args: diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index ef6057520a..8e1a9f5f9d 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -45,8 +45,7 @@ def _split_name_and_units(name): def read_header(file_handle): - """ - Return a dictionary containing the header information extracted + """Return a dictionary containing the header information extracted from the the provided NAME file object. Args: @@ -96,8 +95,7 @@ def read_header(file_handle): def _read_data_arrays(file_handle, n_arrays, shape): - """ - Return a list of NumPy arrays containing the data extracted from + """Return a list of NumPy arrays containing the data extracted from the provided file object. The number and shape of the arrays must be specified. 
@@ -126,8 +124,7 @@ def _read_data_arrays(file_handle, n_arrays, shape): def _build_lat_lon_for_NAME_field( header, dimindex, x_or_y, coord_names=["longitude", "latitude"] ): - """ - Return regular latitude and longitude coordinates extracted from + """Return regular latitude and longitude coordinates extracted from the provided header dictionary. """ @@ -148,8 +145,7 @@ def _build_lat_lon_for_NAME_field( def _build_lat_lon_for_NAME_timeseries(column_headings): - """ - Return regular latitude and longitude coordinates extracted from + """Return regular latitude and longitude coordinates extracted from the provided column_headings dictionary. """ @@ -188,8 +184,7 @@ def _build_lat_lon_for_NAME_timeseries(column_headings): def _calc_integration_period(time_avgs): - """ - Return a list of datetime.timedelta objects determined from the provided + """Return a list of datetime.timedelta objects determined from the provided list of averaging/integration period column headings. """ @@ -216,8 +211,7 @@ def _calc_integration_period(time_avgs): def _parse_units(units): - """ - Return a known :class:`cf_units.Unit` given a NAME unit + """Return a known :class:`cf_units.Unit` given a NAME unit .. note:: @@ -267,8 +261,7 @@ def _parse_units(units): def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None): - """ - Parser for the z component of field headings. + """Parser for the z component of field headings. This parse is specifically for handling the z component of NAME field headings, which include height above ground level, height above sea level @@ -391,8 +384,7 @@ def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None): def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=None): - """ - Yield :class:`iris.cube.Cube` instances given + """Yield :class:`iris.cube.Cube` instances given the headers, column headings, coords and data_arrays extracted from a NAME file. 
@@ -554,8 +546,7 @@ def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=N def _build_cell_methods(av_or_ints, coord): - """ - Return a list of :class:`iris.coords.CellMethod` instances + """Return a list of :class:`iris.coords.CellMethod` instances based on the provided list of column heading entries and the associated coordinate. If a given entry does not correspond to a cell method (e.g. "No time averaging"), a value of None is inserted. @@ -592,8 +583,7 @@ def _build_cell_methods(av_or_ints, coord): def load_NAMEIII_field(filename): - """ - Load a NAME III grid output file returning a + """Load a NAME III grid output file returning a generator of :class:`iris.cube.Cube` instances. Args: @@ -687,8 +677,7 @@ def load_NAMEIII_field(filename): def load_NAMEII_field(filename): - """ - Load a NAME II grid output file returning a + """Load a NAME II grid output file returning a generator of :class:`iris.cube.Cube` instances. Args: @@ -775,8 +764,7 @@ def load_NAMEII_field(filename): def load_NAMEIII_timeseries(filename): - """ - Load a NAME III time series file returning a + """Load a NAME III time series file returning a generator of :class:`iris.cube.Cube` instances. Args: @@ -864,8 +852,7 @@ def load_NAMEIII_timeseries(filename): def load_NAMEII_timeseries(filename): - """ - Load a NAME II Time Series file returning a + """Load a NAME II Time Series file returning a generator of :class:`iris.cube.Cube` instances. Args: @@ -935,8 +922,7 @@ def load_NAMEII_timeseries(filename): def load_NAMEIII_version2(filename): - """ - Load a NAME III version 2 file returning a + """Load a NAME III version 2 file returning a generator of :class:`iris.cube.Cube` instances. Args: @@ -1155,8 +1141,7 @@ def load_NAMEIII_version2(filename): def load_NAMEIII_trajectory(filename): - """ - Load a NAME III trajectory file returning a + """Load a NAME III trajectory file returning a generator of :class:`iris.cube.Cube` instances. 
Args: diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index 1b01e71ca8..61b6f74cc6 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support loading and saving NetCDF files using CF conventions for metadata interpretation. +"""Support loading and saving NetCDF files using CF conventions for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index c3159781b5..9aafbf312d 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Module to ensure all calls to the netCDF4 library are thread-safe. +"""Module to ensure all calls to the netCDF4 library are thread-safe. Intention is that no other Iris module should import the netCDF4 module. @@ -23,8 +22,7 @@ class _ThreadSafeWrapper(ABC): - """ - Contains a netCDF4 class instance, ensuring wrapping all API calls. + """Contains a netCDF4 class instance, ensuring wrapping all API calls. Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK. @@ -96,8 +94,7 @@ def __setitem__(self, key, value): class DimensionWrapper(_ThreadSafeWrapper): - """ - Accessor for a netCDF4.Dimension, always acquiring _GLOBAL_NETCDF4_LOCK. + """Accessor for a netCDF4.Dimension, always acquiring _GLOBAL_NETCDF4_LOCK. All API calls should be identical to those for netCDF4.Dimension. 
""" @@ -107,8 +104,7 @@ class DimensionWrapper(_ThreadSafeWrapper): class VariableWrapper(_ThreadSafeWrapper): - """ - Accessor for a netCDF4.Variable, always acquiring _GLOBAL_NETCDF4_LOCK. + """Accessor for a netCDF4.Variable, always acquiring _GLOBAL_NETCDF4_LOCK. All API calls should be identical to those for netCDF4.Variable. """ @@ -117,8 +113,7 @@ class VariableWrapper(_ThreadSafeWrapper): _DUCKTYPE_CHECK_PROPERTIES = ["dimensions", "dtype"] def setncattr(self, *args, **kwargs) -> None: - """ - Call netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. + """Call netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. Only defined explicitly in order to get some mocks to work. """ @@ -127,8 +122,7 @@ def setncattr(self, *args, **kwargs) -> None: @property def dimensions(self) -> typing.List[str]: - """ - Calls netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK. + """Calls netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK. Only defined explicitly in order to get some mocks to work. """ @@ -141,8 +135,7 @@ def dimensions(self) -> typing.List[str]: # DimensionWrapper(s). def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: - """ - Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK. + """Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK. Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. The original returned netCDF4.Dimensions @@ -155,8 +148,7 @@ def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: class GroupWrapper(_ThreadSafeWrapper): - """ - Accessor for a netCDF4.Group, always acquiring _GLOBAL_NETCDF4_LOCK. + """Accessor for a netCDF4.Group, always acquiring _GLOBAL_NETCDF4_LOCK. All API calls should be identical to those for netCDF4.Group. 
""" @@ -170,8 +162,7 @@ class GroupWrapper(_ThreadSafeWrapper): @property def dimensions(self) -> typing.Dict[str, DimensionWrapper]: - """ - Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. The original returned netCDF4.Dimensions @@ -183,8 +174,7 @@ def dimensions(self) -> typing.Dict[str, DimensionWrapper]: return {k: DimensionWrapper.from_existing(v) for k, v in dimensions_.items()} def createDimension(self, *args, **kwargs) -> DimensionWrapper: - """ - Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. The original returned @@ -201,8 +191,7 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper: @property def variables(self) -> typing.Dict[str, VariableWrapper]: - """ - Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. The original returned netCDF4.Variables @@ -214,8 +203,7 @@ def variables(self) -> typing.Dict[str, VariableWrapper]: return {k: VariableWrapper.from_existing(v) for k, v in variables_.items()} def createVariable(self, *args, **kwargs) -> VariableWrapper: - """ - Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. 
The original @@ -230,8 +218,7 @@ def createVariable(self, *args, **kwargs) -> VariableWrapper: def get_variables_by_attributes( self, *args, **kwargs ) -> typing.List[VariableWrapper]: - """ - Call get_variables_by_attributes() from netCDF4.Group/Dataset. + """Call get_variables_by_attributes() from netCDF4.Group/Dataset. Call get_variables_by_attributes() from netCDF4.Group/Dataset within_GLOBAL_NETCDF4_LOCK, returning VariableWrappers. @@ -251,8 +238,7 @@ def get_variables_by_attributes( @property def groups(self): - """ - Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers. @@ -267,8 +253,7 @@ def groups(self): @property def parent(self): - """ - Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper. @@ -282,8 +267,7 @@ def parent(self): return GroupWrapper.from_existing(parent_) def createGroup(self, *args, **kwargs): - """ - Call createGroup() from netCDF4.Group/Dataset. + """Call createGroup() from netCDF4.Group/Dataset. Call createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. The original returned @@ -297,8 +281,7 @@ def createGroup(self, *args, **kwargs): class DatasetWrapper(GroupWrapper): - """ - Accessor for a netCDF4.Dataset, always acquiring _GLOBAL_NETCDF4_LOCK. + """Accessor for a netCDF4.Dataset, always acquiring _GLOBAL_NETCDF4_LOCK. All API calls should be identical to those for netCDF4.Dataset. """ @@ -309,8 +292,7 @@ class DatasetWrapper(GroupWrapper): @classmethod def fromcdl(cls, *args, **kwargs): - """ - Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK. + """Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK. 
Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper. The original returned netCDF4.Dataset is @@ -370,8 +352,7 @@ def __setstate__(self, state): class NetCDFWriteProxy: - """ - An object mimicking the data access of a netCDF4.Variable. + """An object mimicking the data access of a netCDF4.Variable. The "opposite" of a NetCDFDataProxy : An object mimicking the data access of a netCDF4.Variable, but where the data is to be ***written to***. diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index 6a25089ef1..b070b80d69 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -165,8 +165,7 @@ def _read_chars(infile, num): class NimrodField: - """ - A data field from a NIMROD file. + """A data field from a NIMROD file. Capable of converting itself into a :class:`~iris.cube.Cube` @@ -178,8 +177,7 @@ class NimrodField: """ def __init__(self, from_file=None): - """ - Create a NimrodField object and optionally read from an open file. + """Create a NimrodField object and optionally read from an open file. Example:: @@ -242,8 +240,7 @@ def _read_header(self, infile): ) def _read_data(self, infile): - """ - Read the data array: int8, int16, int32 or float32 + """Read the data array: int8, int16, int32 or float32 (surrounded by 4-byte length, at start and end) @@ -293,8 +290,7 @@ def _read_data(self, infile): def load_cubes(filenames, callback=None): - """ - Loads cubes from a list of NIMROD filenames. + """Loads cubes from a list of NIMROD filenames. Args: diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index d72571fff9..bb6d13f50a 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -32,9 +32,7 @@ class TranslationWarning(IrisNimrodTranslationWarning): - """ - Backwards compatible form of :class:`iris.exceptions.IrisNimrodTranslationWarning`. 
- """ + """Backwards compatible form of :class:`iris.exceptions.IrisNimrodTranslationWarning`.""" # TODO: remove at the next major release. pass @@ -91,16 +89,12 @@ def name(cube, field, handle_metadata_errors): def remove_unprintable_chars(input_str): - """ - Remove unprintable characters from a string and return the result. - - """ + """Remove unprintable characters from a string and return the result.""" return "".join(c if c in string.printable else " " for c in input_str).strip() def units(cube, field): - """ - Set the cube's units from the field. + """Set the cube's units from the field. Takes into account nimrod unit strings of the form unit*?? where the data needs to converted by dividing by ??. Also converts units we know Iris @@ -243,8 +237,7 @@ def reference_time(cube, field): def forecast_period(cube): - """ - Add a forecast_period coord based on existing time and + """Add a forecast_period coord based on existing time and forecast_reference_time coords. Must be run after time() and reference_time() @@ -278,10 +271,7 @@ def forecast_period(cube): def mask_cube(cube, field): - """ - Update cube.data to be a masked array if appropriate. - - """ + """Update cube.data to be a masked array if appropriate.""" dtype = cube.dtype masked_points = None if field.datum_type == 1: @@ -303,8 +293,7 @@ def experiment(cube, field): def proj_biaxial_ellipsoid(field, handle_metadata_errors): - """ - Return the correct dictionary of arguments needed to define an + """Return the correct dictionary of arguments needed to define an iris.coord_systems.GeogCS. 
Based firstly on the value given by ellipsoid, then by grid if ellipsoid is @@ -350,8 +339,7 @@ def proj_biaxial_ellipsoid(field, handle_metadata_errors): def set_british_national_grid_defaults(field, handle_metadata_errors): - """ - Check for missing coord-system meta-data and set default values for + """Check for missing coord-system meta-data and set default values for the Ordnance Survey GB Transverse Mercator projection. Some Radarnet files are missing these. @@ -467,8 +455,7 @@ def horizontal_grid(cube, field, handle_metadata_errors): def vertical_coord(cube, field): - """ - Add a vertical coord to the cube, with bounds, if appropriate. + """Add a vertical coord to the cube, with bounds, if appropriate. Handles special numbers for "at-sea-level" (8888) and "at-ground-level" (9999). @@ -660,8 +647,7 @@ def add_attr(item): def known_threshold_coord(field): - """ - Supplies known threshold coord meta-data for known use cases. + """Supplies known threshold coord meta-data for known use cases. threshold_value_alt exists because some meta-data are mis-assigned in the Nimrod data. @@ -709,8 +695,7 @@ def known_threshold_coord(field): def probability_coord(cube, field, handle_metadata_errors): - """ - Add a coord relating to probability meta-data from the header to the + """Add a coord relating to probability meta-data from the header to the cube if appropriate. Must be run after the name method. @@ -894,8 +879,7 @@ def time_averaging(cube, field): def run(field, handle_metadata_errors=True): - """ - Convert a NIMROD field to an Iris cube. + """Convert a NIMROD field to an Iris cube. Args ---- diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index e70d4cbf52..f6eb3d7168 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Provides UK Met Office Post Process (PP) format specific capabilities. +"""Provides UK Met Office Post Process (PP) format specific capabilities. """ @@ -246,8 +245,7 @@ class _WarnComboIgnoringLoad( class STASH(collections.namedtuple("STASH", "model section item")): - """ - A class to hold a single STASH code. + """A class to hold a single STASH code. Create instances using: >>> model = 1 @@ -277,8 +275,7 @@ class STASH(collections.namedtuple("STASH", "model section item")): __slots__ = () def __new__(cls, model, section, item): - """ - Args + """Args ---- model A positive integer less than 100, or None. @@ -361,8 +358,7 @@ def __ne__(self, other): class SplittableInt: - """ - A class to hold integers which can easily get each decimal digit + """A class to hold integers which can easily get each decimal digit individually. >>> three_six_two = SplittableInt(362) @@ -380,8 +376,7 @@ class SplittableInt: """ def __init__(self, value, name_mapping_dict=None): - """ - Build a SplittableInt given the positive integer value provided. + """Build a SplittableInt given the positive integer value provided. Args ---- @@ -664,8 +659,7 @@ def __ne__(self, other): def _data_bytes_to_shaped_array( data_bytes, lbpack, boundary_packing, data_shape, data_type, mdi, mask=None ): - """ - Convert the already read binary data payload into a numpy array, unpacking + """Convert the already read binary data payload into a numpy array, unpacking and decompressing as per the F3 specification. """ @@ -817,8 +811,7 @@ def _data_bytes_to_shaped_array( def _header_defn(release_number): - """ - Return the zero-indexed header definition for a particular release of + """Return the zero-indexed header definition for a particular release of a PPField. """ @@ -831,11 +824,10 @@ def _header_defn(release_number): def _pp_attribute_names(header_defn): - """ - Return the allowed attributes of a PPField: - all of the normal headers (i.e. 
not the _SPECIAL_HEADERS), - the _SPECIAL_HEADERS with '_' prefixed, - the possible extra data headers. + """Return the allowed attributes of a PPField: + all of the normal headers (i.e. not the _SPECIAL_HEADERS), + the _SPECIAL_HEADERS with '_' prefixed, + the possible extra data headers. """ normal_headers = list( @@ -854,8 +846,7 @@ def _pp_attribute_names(header_defn): class PPField(metaclass=ABCMeta): - """ - A generic class for PP fields - not specific to a particular + """A generic class for PP fields - not specific to a particular header release number. A PPField instance can easily access the PP header "words" as attributes @@ -888,8 +879,7 @@ def __init__(self, header=None): self.raw_lbpack = header[self.HEADER_DICT["lbpack"][0]] def __getattr__(self, key): - """ - Method supports deferred attribute creation, which offers a + """Method supports deferred attribute creation, which offers a significant loading optimisation, particularly when not all attributes are referenced and therefore created on the instance. @@ -982,8 +972,7 @@ def __repr__(self): @property def stash(self): - """ - Stash property giving access to the associated STASH object, + """Stash property giving access to the associated STASH object, now supporting __eq__ """ @@ -1062,8 +1051,7 @@ def lbproc(self, value): @property def data(self): - """ - :class:`numpy.ndarray` representing the multidimensional data + """:class:`numpy.ndarray` representing the multidimensional data of the pp file """ @@ -1137,8 +1125,7 @@ def y_bounds(self): return np.column_stack((self.y_lower_bound, self.y_upper_bound)) def save(self, file_handle): - """ - Save the PPField to the given file object. + """Save the PPField to the given file object. 
(typically created with :func:`open`):: # to append the field to a file @@ -1372,8 +1359,7 @@ def coord_system(self): geog_cs = iris.coord_systems.GeogCS(EARTH_RADIUS) def degrees_ne(angle, ref_angle): - """ - Return whether an angle differs significantly from a set value. + """Return whether an angle differs significantly from a set value. The inputs are in degrees. The difference is judged significant if more than 0.0001 degrees. @@ -1410,8 +1396,7 @@ def _y_coord_name(self): return y_name def copy(self): - """ - Return a deep copy of this PPField. + """Return a deep copy of this PPField. Returns ------- @@ -1461,8 +1446,7 @@ def __ne__(self, other): class PPField2(PPField): - """ - A class to hold a single field from a PP file, with a + """A class to hold a single field from a PP file, with a header release number of 2. """ @@ -1474,8 +1458,7 @@ class PPField2(PPField): @property def t1(self): - """ - cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + """cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, and lbmin attributes. """ @@ -1506,8 +1489,7 @@ def t1(self, dt): @property def t2(self): - """ - cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + """cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, and lbmind attributes. """ @@ -1538,8 +1520,7 @@ def t2(self, dt): class PPField3(PPField): - """ - A class to hold a single field from a PP file, with a + """A class to hold a single field from a PP file, with a header release number of 3. """ @@ -1551,8 +1532,7 @@ class PPField3(PPField): @property def t1(self): - """ - cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + """cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, lbmin, and lbsec attributes. 
""" @@ -1584,8 +1564,7 @@ def t1(self, dt): @property def t2(self): - """ - cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + """cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes. """ @@ -1632,8 +1611,7 @@ def make_pp_field(header): def load(filename, read_data=False, little_ended=False): - """ - Return an iterator of PPFields given a filename. + """Return an iterator of PPFields given a filename. Args ---- @@ -1661,8 +1639,7 @@ def load(filename, read_data=False, little_ended=False): def _interpret_fields(fields): - """ - Turn the fields read with load and FF2PP._extract_field into usable + """Turn the fields read with load and FF2PP._extract_field into usable fields. One of the primary purposes of this function is to either convert "deferred bytes" into "deferred arrays" or "loaded bytes" into actual numpy arrays (via the _create_field_data) function. @@ -1730,8 +1707,7 @@ def _interpret_fields(fields): def _create_field_data(field, data_shape, land_mask_field=None): - """ - Modify a field's ``_data`` attribute either by: + """Modify a field's ``_data`` attribute either by: * converting a 'deferred array bytes' tuple into a lazy array, * converting LoadedArrayBytes into an actual numpy array. @@ -1828,8 +1804,7 @@ def calc_array(mask, values): def _field_gen(filename, read_data_bytes, little_ended=False): - """ - Return a generator of "half-formed" PPField instances derived from + """Return a generator of "half-formed" PPField instances derived from the given filename. A field returned by the generator is only "half-formed" because its @@ -1957,8 +1932,7 @@ def _field_gen(filename, read_data_bytes, little_ended=False): def _convert_constraints(constraints): - """ - Convert known constraints from Iris semantics to PP semantics + """Convert known constraints from Iris semantics to PP semantics ignoring all unknown constraints. 
""" @@ -1967,8 +1941,7 @@ def _convert_constraints(constraints): unhandled_constraints = False def _make_func(stashobj): - """ - Provide unique name-space for each lambda function's stashobj + """Provide unique name-space for each lambda function's stashobj variable. """ @@ -2002,8 +1975,7 @@ def _make_func(stashobj): unhandled_constraints = True def pp_filter(field): - """ - Return True if field is to be kept, + """Return True if field is to be kept, False if field does not match filter """ @@ -2025,8 +1997,7 @@ def pp_filter(field): def load_cubes(filenames, callback=None, constraints=None): - """ - Load cubes from a list of pp filenames. + """Load cubes from a list of pp filenames. Args ---- @@ -2051,8 +2022,7 @@ def load_cubes(filenames, callback=None, constraints=None): def load_cubes_little_endian(filenames, callback=None, constraints=None): - """ - Load cubes from a list of pp filenames containing little-endian data. + """Load cubes from a list of pp filenames containing little-endian data. Args ---- @@ -2081,8 +2051,7 @@ def load_cubes_little_endian(filenames, callback=None, constraints=None): def load_pairs_from_fields(pp_fields): - r""" - Convert an iterable of PP fields into an iterable of tuples of + r"""Convert an iterable of PP fields into an iterable of tuples of (Cubes, PPField). Args @@ -2178,8 +2147,7 @@ def _load_cubes_variable_loader( def save(cube, target, append=False, field_coords=None): - """ - Use the PP saving rules (and any user rules) to save a cube to a PP file. + """Use the PP saving rules (and any user rules) to save a cube to a PP file. Args ---- @@ -2215,8 +2183,7 @@ def save(cube, target, append=False, field_coords=None): def save_pairs_from_cube(cube, field_coords=None, target=None): - """ - Use the PP saving rules to convert a cube or + """Use the PP saving rules to convert a cube or iterable of cubes to an iterable of (2D cube, PP field) pairs. 
Args @@ -2332,8 +2299,7 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): def as_fields(cube, field_coords=None, target=None): - """ - Use the PP saving rules (and any user rules) to convert a cube to + """Use the PP saving rules (and any user rules) to convert a cube to an iterable of PP fields. Args @@ -2359,8 +2325,7 @@ def as_fields(cube, field_coords=None, target=None): def save_fields(fields, target, append=False): - """ - Save an iterable of PP fields to a PP file. + """Save an iterable of PP fields to a PP file. Args ---- diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index fcc54951b6..f3ed22377e 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -58,8 +58,7 @@ def _convert_vertical_coords( brlev, dim=None, ): - """ - Encode scalar or vector vertical level values from PP headers as CM data + """Encode scalar or vector vertical level values from PP headers as CM data components. Args: @@ -303,8 +302,7 @@ def _convert_vertical_coords( def _reshape_vector_args(values_and_dims): - """ - Reshape a group of (array, dimensions-mapping) onto all dimensions. + """Reshape a group of (array, dimensions-mapping) onto all dimensions. The resulting arrays are all mapped over the same dimensions; as many as the maximum dimension number found in the inputs. Those dimensions not @@ -352,8 +350,7 @@ def _reshape_vector_args(values_and_dims): def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): - """ - Collapse points (and optionally bounds) in any dimensions over which all + """Collapse points (and optionally bounds) in any dimensions over which all values are the same. All dimensions are tested, and if degenerate are reduced to length 1. 
@@ -400,8 +397,7 @@ def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): - """ - Reduce the dimensionality of arrays of coordinate points (and optionally + """Reduce the dimensionality of arrays of coordinate points (and optionally bounds). Dimensions over which all values are the same are reduced to size 1, using @@ -460,8 +456,7 @@ def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): def _new_coord_and_dims( is_vector_operation, name, units, points, lower_and_upper_bounds=None ): - """ - Make a new (coordinate, cube_dims) pair with the given points, name, units + """Make a new (coordinate, cube_dims) pair with the given points, name, units and optional bounds. In 'vector' style operation, the data arrays must have same number of @@ -507,8 +502,7 @@ def _new_coord_and_dims( def _epoch_date_hours_internals(epoch_hours_unit, datetime): - """ - Return an 'hours since epoch' number for a date. + """Return an 'hours since epoch' number for a date. Args: * epoch_hours_unit (:class:`cf_unit.Unit'): @@ -616,8 +610,7 @@ def _convert_time_coords( t2_dims=(), lbft_dims=(), ): - """ - Make time coordinates from the time metadata. + """Make time coordinates from the time metadata. Args: @@ -828,8 +821,7 @@ def date2year(t_in): def _model_level_number(lblev): - """ - Return model level number for an LBLEV value. + """Return model level number for an LBLEV value. Args: @@ -852,8 +844,7 @@ def _model_level_number(lblev): def _convert_scalar_realization_coords(lbrsvd4): - """ - Encode scalar 'realization' (aka ensemble) numbers as CM data. + """Encode scalar 'realization' (aka ensemble) numbers as CM data. Returns a list of coords_and_dims. @@ -868,8 +859,7 @@ def _convert_scalar_realization_coords(lbrsvd4): def _convert_scalar_pseudo_level_coords(lbuser5): - """ - Encode scalar pseudo-level values as CM data. + """Encode scalar pseudo-level values as CM data. 
Returns a list of coords_and_dims. @@ -883,8 +873,7 @@ def _convert_scalar_pseudo_level_coords(lbuser5): def convert(f): - """ - Converts a PP field into the corresponding items of Cube metadata. + """Converts a PP field into the corresponding items of Cube metadata. Args: @@ -959,8 +948,7 @@ def convert(f): def _all_other_rules(f): - """ - This deals with all the other rules that have not been factored into any of + """This deals with all the other rules that have not been factored into any of the other convert_scalar_coordinate functions above. """ diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 0d26061ac7..376c4a3632 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -24,8 +24,7 @@ def _basic_coord_system_rules(cube, pp): - """ - Rules for setting the coord system of the PP field. + """Rules for setting the coord system of the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -75,8 +74,7 @@ def _um_version_rules(cube, pp): def _stash_rules(cube, pp): - """ - Attributes rules for setting the STASH attribute of the PP field. + """Attributes rules for setting the STASH attribute of the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -95,8 +93,7 @@ def _stash_rules(cube, pp): def _general_time_rules(cube, pp): - """ - Rules for setting time metadata of the PP field. + """Rules for setting time metadata of the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -366,8 +363,7 @@ def _general_time_rules(cube, pp): def _calendar_rules(cube, pp): - """ - Rules for setting the calendar of the PP field. + """Rules for setting the calendar of the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -389,8 +385,7 @@ def _calendar_rules(cube, pp): def _grid_and_pole_rules(cube, pp): - """ - Rules for setting the horizontal grid and pole location of the PP field. 
+ """Rules for setting the horizontal grid and pole location of the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -468,8 +463,7 @@ def _grid_and_pole_rules(cube, pp): def _non_std_cross_section_rules(cube, pp): - """ - Rules for applying non-standard cross-sections to the PP field. + """Rules for applying non-standard cross-sections to the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -596,8 +590,7 @@ def _non_std_cross_section_rules(cube, pp): def _lbproc_rules(cube, pp): - """ - Rules for setting the processing code of the PP field. + """Rules for setting the processing code of the PP field. Note: `pp.lbproc` must be set to 0 before these rules are run. @@ -641,8 +634,7 @@ def _lbproc_rules(cube, pp): def _vertical_rules(cube, pp): - """ - Rules for setting vertical levels for the PP field. + """Rules for setting vertical levels for the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -823,8 +815,7 @@ def _vertical_rules(cube, pp): def _all_other_rules(cube, pp): - """ - Fields currently managed by these rules: + """Fields currently managed by these rules: * lbfc (field code) * lbrsvd[3] (ensemble member number) diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 4dbaa8bb51..e2bfd250fd 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Generalised mechanisms for metadata translation and cube construction. +"""Generalised mechanisms for metadata translation and cube construction. 
""" @@ -103,8 +102,7 @@ def scalar_cell_method(cube, method, coord_name): def has_aux_factory(cube, aux_factory_class): - """ - Try to find an class:`~iris.aux_factory.AuxCoordFactory` instance of the + """Try to find an class:`~iris.aux_factory.AuxCoordFactory` instance of the specified type on the cube. """ @@ -115,8 +113,7 @@ def has_aux_factory(cube, aux_factory_class): def aux_factory(cube, aux_factory_class): - """ - Return the class:`~iris.aux_factory.AuxCoordFactory` instance of the + """Return the class:`~iris.aux_factory.AuxCoordFactory` instance of the specified type from a cube. """ @@ -200,8 +197,7 @@ def _regrid_to_target(src_cube, target_coords, target_cube): def _ensure_aligned(regrid_cache, src_cube, target_cube): - """ - Returns a version of `src_cube` suitable for use as an AuxCoord + """Returns a version of `src_cube` suitable for use as an AuxCoord on `target_cube`, or None if no version can be made. """ @@ -255,8 +251,7 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): class Loader(collections.namedtuple("Loader", _loader_attrs)): def __new__(cls, field_generator, field_generator_kwargs, converter): - """ - Create a definition of a field-based Cube loader. + """Create a definition of a field-based Cube loader. Args: @@ -390,8 +385,7 @@ def _load_pairs_from_fields_and_filenames( def load_pairs_from_fields(fields, converter): - """ - Convert an iterable of fields into an iterable of Cubes using the + """Convert an iterable of fields into an iterable of Cubes using the provided converter. Args: diff --git a/lib/iris/fileformats/um/__init__.py b/lib/iris/fileformats/um/__init__.py index ac38e45de5..3a4bd6c516 100644 --- a/lib/iris/fileformats/um/__init__.py +++ b/lib/iris/fileformats/um/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides iris loading support for UM Fieldsfile-like file types, and PP. 
+"""Provides iris loading support for UM Fieldsfile-like file types, and PP. At present, the only UM file types supported are true FieldsFiles and LBCs. Other types of UM file may fail to load correctly (or at all). diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index a75d7b16f4..6ab4f20374 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support for "fast" loading of structured UM files in iris load functions, +"""Support for "fast" loading of structured UM files in iris load functions, i.e. :meth:`iris.load` and its associates. This provides a context manager to enable structured loading via all the iris @@ -48,8 +47,7 @@ class FieldCollation(BasicFieldCollation): # class, now renamed 'BasicFieldCollation'. def __init__(self, fields, filepath): - """ - Args: + """Args: * fields (iterable of :class:`iris.fileformats.pp.PPField`): The fields in the collation. @@ -67,8 +65,7 @@ def data_filepath(self): @property def data_field_indices(self): - """ - Field indices of the contained PPFields in the input file. + """Field indices of the contained PPFields in the input file. This records the original file location of the individual data fields contained, within the input datafile. @@ -155,8 +152,7 @@ def iter_fields_decorated_with_load_indices(fields_iter): def _convert_collation(collation): - """ - Converts a FieldCollation into the corresponding items of Cube + """Converts a FieldCollation into the corresponding items of Cube metadata. Args: @@ -397,8 +393,7 @@ def context(self, loads_use_structured=None, structured_load_is_raw=None): @contextmanager def structured_um_loading(): - """ - Load cubes from structured UM Fieldsfile and PP files. + """Load cubes from structured UM Fieldsfile and PP files. 
"Structured" loading is a streamlined, fast load operation, to be used **only** on fieldsfiles or PP files whose fields repeat regularly over @@ -541,8 +536,7 @@ def structured_um_loading(): @contextmanager def _raw_structured_loading(): - """ - Private context manager called by :func:`iris.load_raw` to prevent + """Private context manager called by :func:`iris.load_raw` to prevent structured loading from concatenating its result cubes in that case. """ diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 44a9520c8b..26f2816891 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Code for fast loading of structured UM data. +"""Code for fast loading of structured UM data. This module defines which pp-field elements take part in structured loading, and provides creation of :class:`BasicFieldCollation` objects from lists of @@ -21,8 +20,7 @@ class BasicFieldCollation: - """ - An object representing a group of UM fields with array structure that can + """An object representing a group of UM fields with array structure that can be vectorized into a single cube. For example: @@ -47,8 +45,7 @@ class BasicFieldCollation: """ def __init__(self, fields): - """ - Args: + """Args: * fields (iterable of :class:`iris.fileformats.pp.PPField`): The fields in the collation. @@ -111,8 +108,7 @@ def _UNUSED_primary_dimension_elements(self): @property def element_arrays_and_dims(self): - """ - Value arrays for vector metadata elements. + """Value arrays for vector metadata elements. A dictionary mapping element_name: (value_array, dims). 
@@ -168,8 +164,7 @@ def t2_fn(fld): _TIME_ELEMENT_MULTIPLIERS = np.cumprod([1, 60, 60, 24, 31, 12])[::-1] def _time_comparable_int(self, yr, mon, dat, hr, min, sec): - """ - Return a single unique number representing a date-time tuple. + """Return a single unique number representing a date-time tuple. This calculation takes no account of the time field's real calendar, instead giving every month 31 days, which preserves the required @@ -237,8 +232,7 @@ def _calculate_structure(self): def _um_collation_key_function(field): - """ - Standard collation key definition for fast structured field loading. + """Standard collation key definition for fast structured field loading. The elements used here are the minimum sufficient to define the 'phenomenon', as described for :meth:`group_structured_fields`. @@ -268,8 +262,7 @@ def _um_collation_key_function(field): def group_structured_fields( field_iterator, collation_class=BasicFieldCollation, **collation_kwargs ): - """ - Collect structured fields into identified groups whose fields can be + """Collect structured fields into identified groups whose fields can be combined to form a single cube. Args: diff --git a/lib/iris/fileformats/um/_ff_replacement.py b/lib/iris/fileformats/um/_ff_replacement.py index 33ab2fbb68..d726f63a10 100644 --- a/lib/iris/fileformats/um/_ff_replacement.py +++ b/lib/iris/fileformats/um/_ff_replacement.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support for UM "fieldsfile-like" files. +"""Support for UM "fieldsfile-like" files. At present, the only UM file types supported are true FieldsFiles and LBCs. Other types of UM file may fail to load correctly (or at all). @@ -15,8 +14,7 @@ def um_to_pp(filename, read_data=False, word_depth=None): - """ - Extract individual PPFields from within a UM Fieldsfile-like file. 
+ """Extract individual PPFields from within a UM Fieldsfile-like file. Returns an iterator over the fields contained within the FieldsFile, returned as :class:`iris.fileformats.pp.PPField` instances. @@ -52,8 +50,7 @@ def um_to_pp(filename, read_data=False, word_depth=None): def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): - """ - Loads cubes from filenames of UM fieldsfile-like files. + """Loads cubes from filenames of UM fieldsfile-like files. Args: @@ -81,8 +78,7 @@ def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): def load_cubes_32bit_ieee(filenames, callback, constraints=None): - """ - Loads cubes from filenames of 32bit ieee converted UM fieldsfile-like + """Loads cubes from filenames of 32bit ieee converted UM fieldsfile-like files. .. seealso:: diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index b3a8bdc40d..b43c4a2e50 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -9,8 +9,7 @@ def _optimal_dimensioning_structure(structure, element_priorities): - """ - Uses the structure options provided by the + """Uses the structure options provided by the :class:`~iris.fileformats._structured_array_identification.GroupStructure` to determine the optimal array structure for the :class:`FieldCollation`. @@ -50,8 +49,7 @@ def _optimal_dimensioning_structure(structure, element_priorities): def optimal_array_structure(ordering_elements, actual_values_elements=None): - """ - Calculate an optimal array replication structure for a set of vectors. + """Calculate an optimal array replication structure for a set of vectors. 
Args: diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index ca205d01f5..2a056c788c 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provide convenient file format identification. +"""Provide convenient file format identification. A module to provide convenient file format identification through a combination of filename extension and file based *magic* numbers. @@ -49,8 +48,7 @@ class FormatAgent: - """ - Identifies format of a given file by interrogating its children instances. + """Identifies format of a given file by interrogating its children instances. The FormatAgent class is the containing object which is responsible for identifying the format of a given file by interrogating its children @@ -87,8 +85,7 @@ def __str__(self): ) def get_spec(self, basename, buffer_obj): - """ - Pick the first FormatSpecification. + """Pick the first FormatSpecification. Pick the first FormatSpecification which can handle the given filename and file/buffer object. @@ -155,8 +152,7 @@ def get_spec(self, basename, buffer_obj): @functools.total_ordering class FormatSpecification: - """ - Provides the base class for file type definition. + """Provides the base class for file type definition. Every FormatSpecification instance has a name which can be accessed with the :attr:`FormatSpecification.name` property and a FileElement, such as @@ -174,8 +170,7 @@ def __init__( priority=0, constraint_aware_handler=False, ): - """ - Construct a new FormatSpecification. + """Construct a new FormatSpecification. Parameters ---------- @@ -272,8 +267,7 @@ def __str__(self): class FileElement: - """ - Represents a specific aspect of a FileFormat. + """Represents a specific aspect of a FileFormat. 
Represents a specific aspect of a FileFormat which can be identified using the given element getter function. @@ -281,8 +275,7 @@ class FileElement: """ def __init__(self, requires_fh=True): - """ - Construct a new file element, which may require a file buffer. + """Construct a new file element, which may require a file buffer. Parameters ---------- @@ -348,8 +341,7 @@ def get_element(self, basename, file_handle): class UriProtocol(FileElement): - """ - Return the scheme and part from a URI, using :func:`~iris.io.decode_uri`. + """Return the scheme and part from a URI, using :func:`~iris.io.decode_uri`. A :class:`FileElement` that returns the "scheme" and "part" from a URI, using :func:`~iris.io.decode_uri`. @@ -367,8 +359,7 @@ def get_element(self, basename, file_handle): class DataSourceObjectProtocol(FileElement): - """ - A :class:`FileElement` that simply returns the URI entry itself. + """A :class:`FileElement` that simply returns the URI entry itself. This enables a arbitrary non-string data object to be passed, subject to subsequent checks on the object itself (specified in the handler). @@ -379,8 +370,7 @@ def __init__(self): super().__init__(requires_fh=False) def get_element(self, basename, file_handle): - """ - In this context, there should *not* be a file opened by the handler. + """In this context, there should *not* be a file opened by the handler. Just return 'basename', which in this case is not a name, or even a string, but a passed 'data object'. diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index e85c670433..c4565d05c9 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Cube functions for iteration in step. +"""Cube functions for iteration in step. 
""" @@ -19,8 +18,7 @@ def izip(*cubes, **kwargs): - """ - Return an iterator for iterating over a collection of cubes in step. + """Return an iterator for iterating over a collection of cubes in step. If the input cubes have dimensions for which there are no common coordinates, those dimensions will be treated as orthogonal. The @@ -171,8 +169,7 @@ def izip(*cubes, **kwargs): class _ZipSlicesIterator(Iterator): - """ - Extension to _SlicesIterator (see cube.py) to support iteration over a + """Extension to _SlicesIterator (see cube.py) to support iteration over a collection of cubes in step. """ @@ -284,8 +281,7 @@ def __next__(self): class _CoordWrapper: - """ - Class for creating a coordinate wrapper that allows the use of an + """Class for creating a coordinate wrapper that allows the use of an alternative equality function based on metadata rather than metadata + points/bounds. diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 7bfde5b6b6..acea46432a 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Load, configure and register color map palettes and initialise +"""Load, configure and register color map palettes and initialise color map meta-data mappings. """ @@ -36,8 +35,7 @@ def is_brewer(cmap): - """ - Determine whether the color map is a Cynthia Brewer color map. + """Determine whether the color map is a Cynthia Brewer color map. Args: @@ -55,8 +53,7 @@ def is_brewer(cmap): def _default_cmap_norm(args, kwargs): - """ - This function injects default cmap and norm behaviour into the keyword + """This function injects default cmap and norm behaviour into the keyword arguments, based on the cube referenced within the positional arguments. 
""" cube = None @@ -106,8 +103,7 @@ def _default_cmap_norm(args, kwargs): def cmap_norm(cube): - """ - Determine the default :class:`matplotlib.colors.LinearSegmentedColormap` + """Determine the default :class:`matplotlib.colors.LinearSegmentedColormap` and :class:`iris.palette.SymmetricNormalize` instances associated with the cube. @@ -131,8 +127,7 @@ def cmap_norm(cube): def auto_palette(func): - """ - Decorator wrapper function to control the default behaviour of the + """Decorator wrapper function to control the default behaviour of the matplotlib cmap and norm keyword arguments. Args: @@ -147,8 +142,7 @@ def auto_palette(func): @wraps(func) def wrapper_func(*args, **kwargs): - """ - Closure wrapper function to provide default keyword argument + """Closure wrapper function to provide default keyword argument behaviour. """ @@ -162,9 +156,7 @@ def wrapper_func(*args, **kwargs): class SymmetricNormalize(mpl_colors.Normalize): - """ - Provides a symmetric normalization class around a given pivot point. - """ + """Provides a symmetric normalization class around a given pivot point.""" def __init__(self, pivot, *args, **kwargs): self.pivot = pivot @@ -219,8 +211,7 @@ def vmax(self, val): def _load_palette(): - """ - Load, configure and register color map palettes and initialise + """Load, configure and register color map palettes and initialise color map metadata mappings. """ diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 31951c8537..c5b4b0b498 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provide conversion to and from Pandas data structures. +"""Provide conversion to and from Pandas data structures. 
See also: https://pandas.pydata.org/ @@ -32,8 +31,7 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): - """ - Create a Coord or other dimensional metadata from a Pandas index or columns array. + """Create a Coord or other dimensional metadata from a Pandas index or columns array. If no calendar is specified for a time series, Standard is assumed. @@ -78,9 +76,7 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): def _add_iris_coord(cube, name, points, dim, calendar=None): - """ - Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array. - """ + """Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array.""" # Most functionality has been abstracted to _get_dimensional_metadata, # allowing reuse in as_cube() and as_cubes(). coord = _get_dimensional_metadata(name, points, calendar) @@ -92,8 +88,7 @@ def _add_iris_coord(cube, name, points, dim, calendar=None): def _series_index_unique(pandas_series: pandas.Series): - """ - Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. + """Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. Iterates through grouping single index levels, then combinations of 2 levels, then 3 etcetera, until single :class:`~pandas.Series` values per @@ -127,8 +122,7 @@ def as_cube( copy=True, calendars=None, ): - """ - Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. + """Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. .. deprecated:: 3.3.0 @@ -198,8 +192,7 @@ def as_cubes( cell_measure_cols=None, ancillary_variable_cols=None, ): - """ - Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. + """Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. 
The index of `pandas_structure` will be used for generating the :class:`~iris.cube.Cube` dimension(s) and :class:`~iris.coords.DimCoord`\\ s. @@ -569,8 +562,7 @@ def _make_cell_measures_list(cube): def as_series(cube, copy=True): - """ - Convert a 1D cube to a Pandas Series. + """Convert a 1D cube to a Pandas Series. .. deprecated:: 3.4.0 This function is scheduled for removal in a future release, being @@ -625,8 +617,7 @@ def as_data_frame( add_cell_measures=False, add_ancillary_variables=False, ): - """ - Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. + """Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. :attr:`~iris.cube.Cube.dim_coords` and :attr:`~iris.cube.Cube.data` are flattened into a long-style :class:`~pandas.DataFrame`. Other diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 2dbadab2c9..667af06af1 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` +"""Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. See also: :ref:`matplotlib `. @@ -134,8 +133,7 @@ def _valid_bound_dim_coord(coord): def _get_plot_defn(cube, mode, ndims=2): - """ - Return data and plot-axis coords given a cube & a mode of either + """Return data and plot-axis coords given a cube & a mode of either POINT_MODE or BOUND_MODE. """ @@ -257,8 +255,7 @@ def ticker_func(tick_location, _): def _invert_yaxis(v_coord, axes=None): - """ - Inverts the y-axis of the current plot based on conditions: + """Inverts the y-axis of the current plot based on conditions: * If the y-axis is already inverted we don't want to re-invert it. * If v_coord is None then it will not have any attributes. 
@@ -279,8 +276,7 @@ def _invert_yaxis(v_coord, axes=None): def _check_bounds_contiguity_and_mask(coord, data, atol=None, rtol=None): - """ - Checks that any discontiguities in the bounds of the given coordinate only + """Checks that any discontiguities in the bounds of the given coordinate only occur where the data is masked. Where a discontinuity occurs the grid created for plotting will not be @@ -703,8 +699,7 @@ def _get_geodesic_params(globe): def _shift_plot_sections(u_object, u, v): - """ - Shifts subsections of u by multiples of 360 degrees within ranges + """Shifts subsections of u by multiples of 360 degrees within ranges defined by the points where the line should cross over the 0/360 degree longitude boundary. @@ -815,8 +810,7 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): - """ - This function is equivalend to _draw_two_1d_from_points but expects two + """This function is equivalend to _draw_two_1d_from_points but expects two y-axis variables rather than one (such as is required for .fill_between). It can't be used where the y-axis variables are string coordinates. The y-axis variable provided first has precedence where the two differ on whether the @@ -859,8 +853,7 @@ def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): def _replace_axes_with_cartopy_axes(cartopy_proj): - """ - Replace non-cartopy subplot/axes with a cartopy alternative + """Replace non-cartopy subplot/axes with a cartopy alternative based on the provided projection. If the current axes are already an instance of :class:`cartopy.mpl.geoaxes.GeoAxes` then no action is taken. 
@@ -892,8 +885,7 @@ def _replace_axes_with_cartopy_axes(cartopy_proj): def _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs): - """ - Replace the current non-cartopy axes with + """Replace the current non-cartopy axes with :class:`cartopy.mpl.geoaxes.GeoAxes` and return the appropriate kwargs dict based on the provided coordinates and kwargs. @@ -959,8 +951,7 @@ def _check_geostationary_coords_and_convert(x, y, kwargs): def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwargs): - """ - Draw the given cube on a map using its points or bounds. + """Draw the given cube on a map using its points or bounds. "Mode" parameter will switch functionality between POINT or BOUND plotting. @@ -1047,8 +1038,7 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwar def contour(cube, *args, **kwargs): - """ - Draws contour lines based on the given Cube. + """Draws contour lines based on the given Cube. Kwargs: @@ -1077,8 +1067,7 @@ def contour(cube, *args, **kwargs): def contourf(cube, *args, **kwargs): - """ - Draws filled contours based on the given Cube. + """Draws filled contours based on the given Cube. Kwargs: @@ -1162,8 +1151,7 @@ def contourf(cube, *args, **kwargs): def default_projection(cube): - """ - Return the primary map projection for the given cube. + """Return the primary map projection for the given cube. Using the returned projection, one can create a cartopy map with:: @@ -1183,8 +1171,7 @@ def default_projection(cube): def default_projection_extent(cube, mode=iris.coords.POINT_MODE): - """ - Return the cube's extents ``(x0, x1, y0, y1)`` in its default projection. + """Return the cube's extents ``(x0, x1, y0, y1)`` in its default projection. Keyword arguments: @@ -1300,8 +1287,7 @@ def horiz_plot(v_coord, orography, style_args): def outline(cube, coords=None, color="k", linewidth=None, axes=None): - """ - Draws cell outlines based on the given Cube. 
+ """Draws cell outlines based on the given Cube. Kwargs: @@ -1345,8 +1331,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): def pcolor(cube, *args, **kwargs): - """ - Draws a pseudocolor plot based on the given 2-dimensional Cube. + """Draws a pseudocolor plot based on the given 2-dimensional Cube. The cube must have either two 1-dimensional coordinates or two 2-dimensional coordinates with contiguous bounds to plot the cube against. @@ -1384,8 +1369,7 @@ def pcolor(cube, *args, **kwargs): def pcolormesh(cube, *args, **kwargs): - """ - Draws a pseudocolor plot based on the given 2-dimensional Cube. + """Draws a pseudocolor plot based on the given 2-dimensional Cube. The cube must have either two 1-dimensional coordinates or two 2-dimensional coordinates with contiguous bounds to plot against each @@ -1421,8 +1405,7 @@ def pcolormesh(cube, *args, **kwargs): def points(cube, *args, **kwargs): - """ - Draws sample point positions based on the given Cube. + """Draws sample point positions based on the given Cube. Kwargs: @@ -1454,8 +1437,7 @@ def _scatter_args(u, v, data, *args, **kwargs): def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): - """ - Callback from _draw_2d_from_points for 'quiver' and 'streamlines'. + """Callback from _draw_2d_from_points for 'quiver' and 'streamlines'. Returns arguments (x, y, u, v), to be passed to the underlying matplotlib call. @@ -1495,8 +1477,7 @@ def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): def barbs(u_cube, v_cube, *args, **kwargs): - """ - Draws a barb plot from two vector component cubes. Triangles, full-lines + """Draws a barb plot from two vector component cubes. Triangles, full-lines and half-lines represent increments of 50, 10 and 5 respectively. Args: @@ -1550,8 +1531,7 @@ def barbs(u_cube, v_cube, *args, **kwargs): def quiver(u_cube, v_cube, *args, **kwargs): - """ - Draws an arrow plot from two vector component cubes. 
+ """Draws an arrow plot from two vector component cubes. Args: @@ -1605,8 +1585,7 @@ def quiver(u_cube, v_cube, *args, **kwargs): def plot(*args, **kwargs): - """ - Draws a line plot based on the given cube(s) or coordinate(s). + """Draws a line plot based on the given cube(s) or coordinate(s). The first one or two arguments may be cubes or coordinates to plot. Each of the following is valid:: @@ -1657,8 +1636,7 @@ def plot(*args, **kwargs): def scatter(x, y, *args, **kwargs): - """ - Draws a scatter plot based on the given cube(s) or coordinate(s). + """Draws a scatter plot based on the given cube(s) or coordinate(s). Args: @@ -1694,8 +1672,7 @@ def scatter(x, y, *args, **kwargs): def fill_between(x, y1, y2, *args, **kwargs): - """ - Plots y1 and y2 against x, and fills the space between them. + """Plots y1 and y2 against x, and fills the space between them. Args: @@ -1736,8 +1713,7 @@ def fill_between(x, y1, y2, *args, **kwargs): def hist(x, *args, **kwargs): - """ - Compute and plot a histogram. + """Compute and plot a histogram. Args: @@ -1774,8 +1750,7 @@ def hist(x, *args, **kwargs): def symbols(x, y, symbols, size, axes=None, units="inches"): - """ - Draws fixed-size symbols. + """Draws fixed-size symbols. See :mod:`iris.symbols` for available symbols. @@ -1846,8 +1821,7 @@ def symbols(x, y, symbols, size, axes=None, units="inches"): def citation(text, figure=None, axes=None): - """ - Add a text citation to a plot. + """Add a text citation to a plot. Places an anchored text citation in the bottom right hand corner of the plot. @@ -1878,8 +1852,7 @@ def citation(text, figure=None, axes=None): def animate(cube_iterator, plot_func, fig=None, **kwargs): - """ - Animates the given cube iterator. + """Animates the given cube iterator. 
Parameters ---------- diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 6523959420..8ed76866aa 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -High-level plotting extensions to :mod:`iris.plot`. +"""High-level plotting extensions to :mod:`iris.plot`. These routines work much like their :mod:`iris.plot` counterparts, but they automatically add a plot title, axis titles, and a colour bar when appropriate. @@ -137,8 +136,7 @@ def _label_1d_plot(*args, **kwargs): def contour(cube, *args, **kwargs): - """ - Draws contour lines on a labelled plot based on the given Cube. + """Draws contour lines on a labelled plot based on the given Cube. With the basic call signature, contour "level" values are chosen automatically:: @@ -169,8 +167,7 @@ def contour(cube, *args, **kwargs): def contourf(cube, *args, **kwargs): - """ - Draws filled contours on a labelled plot based on the given Cube. + """Draws filled contours on a labelled plot based on the given Cube. With the basic call signature, contour "level" values are chosen automatically:: @@ -200,8 +197,7 @@ def contourf(cube, *args, **kwargs): def outline(cube, coords=None, color="k", linewidth=None, axes=None): - """ - Draws cell outlines on a labelled plot based on the given Cube. + """Draws cell outlines on a labelled plot based on the given Cube. Kwargs: @@ -234,8 +230,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): def pcolor(cube, *args, **kwargs): - """ - Draws a labelled pseudocolor plot based on the given Cube. + """Draws a labelled pseudocolor plot based on the given Cube. See :func:`iris.plot.pcolor` for details of valid keyword arguments. @@ -252,8 +247,7 @@ def pcolor(cube, *args, **kwargs): def pcolormesh(cube, *args, **kwargs): - """ - Draws a labelled pseudocolour plot based on the given Cube. 
+ """Draws a labelled pseudocolour plot based on the given Cube. See :func:`iris.plot.pcolormesh` for details of valid keyword arguments. @@ -271,8 +265,7 @@ def pcolormesh(cube, *args, **kwargs): def points(cube, *args, **kwargs): - """ - Draws sample point positions on a labelled plot based on the given Cube. + """Draws sample point positions on a labelled plot based on the given Cube. See :func:`iris.plot.points` for details of valid keyword arguments. @@ -290,8 +283,7 @@ def points(cube, *args, **kwargs): def plot(*args, **kwargs): - """ - Draws a labelled line plot based on the given cube(s) or + """Draws a labelled line plot based on the given cube(s) or coordinate(s). See :func:`iris.plot.plot` for details of valid arguments and @@ -310,8 +302,7 @@ def plot(*args, **kwargs): def scatter(x, y, *args, **kwargs): - """ - Draws a labelled scatter plot based on the given cubes or + """Draws a labelled scatter plot based on the given cubes or coordinates. See :func:`iris.plot.scatter` for details of valid arguments and @@ -330,8 +321,7 @@ def scatter(x, y, *args, **kwargs): def fill_between(x, y1, y2, *args, **kwargs): - """ - Draws a labelled fill_between plot based on the given cubes or coordinates. + """Draws a labelled fill_between plot based on the given cubes or coordinates. See :func:`iris.plot.fill_between` for details of valid arguments and keyword arguments. @@ -348,8 +338,7 @@ def fill_between(x, y1, y2, *args, **kwargs): def hist(x, *args, **kwargs): - """ - Compute and plot a labelled histogram. + """Compute and plot a labelled histogram. See :func:`iris.plot.hist` for details of valid arguments and keyword arguments. diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index ce9ee51771..b55471dadd 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Contains symbol definitions for use with :func:`iris.plot.symbols`. +"""Contains symbol definitions for use with :func:`iris.plot.symbols`. """ @@ -196,8 +195,7 @@ def _backslash_path(): def _wedge_fix(wedge_path): - """ - Fixes the problem with Path.wedge where it doesn't initialise the first, + """Fixes the problem with Path.wedge where it doesn't initialise the first, and last two vertices. This fix should not have any side-effects once Path.wedge has been fixed, but will then be redundant and should be removed. diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index d4ea42f8d9..94c26ef45f 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides testing capabilities and customisations specific to Iris. +"""Provides testing capabilities and customisations specific to Iris. .. note:: This module needs to control the matplotlib backend, so it **must** be imported before ``matplotlib.pyplot``. @@ -153,8 +152,7 @@ def _assert_masked_array(assertion, a, b, strict, **kwargs): def assert_masked_array_equal(a, b, strict=False): - """ - Check that masked arrays are equal. This requires the + """Check that masked arrays are equal. This requires the unmasked values and masks to be identical. Args: @@ -174,8 +172,7 @@ def assert_masked_array_equal(a, b, strict=False): def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): - """ - Check that masked arrays are almost equal. This requires the + """Check that masked arrays are almost equal. This requires the masks to be identical, and the unmasked values to be almost equal. 
@@ -233,8 +230,7 @@ def _assert_str_same( @staticmethod def get_data_path(relative_path): - """ - Return the absolute path to a data file when given the relative path + """Return the absolute path to a data file when given the relative path as a string, or sequence of strings. """ @@ -270,8 +266,7 @@ def get_data_path(relative_path): @staticmethod def get_result_path(relative_path): - """ - Returns the absolute path to a result file when given the relative path + """Returns the absolute path to a result file when given the relative path as a string, or sequence of strings. """ @@ -280,8 +275,7 @@ def get_result_path(relative_path): return os.path.abspath(os.path.join(_RESULT_PATH, relative_path)) def result_path(self, basename=None, ext=""): - """ - Return the full path to a test result, generated from the \ + """Return the full path to a test result, generated from the \ calling file, class and, optionally, method. Optional kwargs : @@ -333,8 +327,7 @@ def assertCMLApproxData(self, cubes, reference_filename=None, **kwargs): self.assertCML(cubes, reference_filename, checksum=False) def assertCDL(self, netcdf_filename, reference_filename=None, flags="-h"): - """ - Test that the CDL for the given netCDF file matches the contents + """Test that the CDL for the given netCDF file matches the contents of the reference file. If the environment variable IRIS_TEST_CREATE_MISSING is @@ -402,8 +395,7 @@ def sort_key(line): self._check_same(cdl, reference_path, type_comparison_name="CDL") def assertCML(self, cubes, reference_filename=None, checksum=True): - """ - Test that the CML for the given cubes matches the contents of + """Test that the CML for the given cubes matches the contents of the reference file. 
If the environment variable IRIS_TEST_CREATE_MISSING is @@ -517,8 +509,7 @@ def assertFilesEqual(self, test_filename, reference_filename): shutil.copy(test_filename, reference_path) def assertString(self, string, reference_filename=None): - """ - Test that `string` matches the contents of the reference file. + """Test that `string` matches the contents of the reference file. If the environment variable IRIS_TEST_CREATE_MISSING is non-empty, the reference file is created if it doesn't exist. @@ -559,10 +550,7 @@ def _check_same(self, item, reference_path, type_comparison_name="CML"): reference_fh.writelines(part.encode("utf-8") for part in item) def assertXMLElement(self, obj, reference_filename): - """ - Calls the xml_element method given obj and asserts the result is the same as the test file. - - """ + """Calls the xml_element method given obj and asserts the result is the same as the test file.""" doc = xml.dom.minidom.Document() doc.appendChild(obj.xml_element(doc)) # sort the attributes on xml elements before testing against known good state. @@ -589,8 +577,7 @@ def _recordWarningMatches(self, expected_regexp=""): @contextlib.contextmanager def assertLogs(self, logger=None, level=None, msg_regex=None): - """ - An extended version of the usual :meth:`unittest.TestCase.assertLogs`, + """An extended version of the usual :meth:`unittest.TestCase.assertLogs`, which also exercises the logger's message formatting. Also adds the ``msg_regex`` kwarg: @@ -641,8 +628,7 @@ def assertArrayAlmostEqual(self, a, b, decimal=6): assertMaskedArrayAlmostEqual = staticmethod(assert_masked_array_almost_equal) def assertArrayAllClose(self, a, b, rtol=1.0e-7, atol=1.0e-8, **kwargs): - """ - Check arrays are equal, within given relative + absolute tolerances. + """Check arrays are equal, within given relative + absolute tolerances. 
Args: @@ -706,15 +692,12 @@ def temp_filename(self, suffix=""): os.remove(filename) def file_checksum(self, file_path): - """ - Generate checksum from file. - """ + """Generate checksum from file.""" with open(file_path, "rb") as in_file: return zlib.crc32(in_file.read()) def _unique_id(self): - """ - Returns the unique ID for the current assertion. + """Returns the unique ID for the current assertion. The ID is composed of two parts: a unique ID for the current test (which is itself composed of the module, class, and test names), and @@ -766,8 +749,7 @@ def _ensure_folder(self, path): os.makedirs(dir_path) def check_graphic(self): - """ - Check the hash of the current matplotlib figure matches the expected + """Check the hash of the current matplotlib figure matches the expected image hash for the current graphic test. To create missing image test results, set the IRIS_TEST_CREATE_MISSING @@ -790,8 +772,7 @@ def _remove_testcase_patches(self): self.testcase_patches.clear() def patch(self, *args, **kwargs): - """ - Install a mock.patch, to be removed after the current test. + """Install a mock.patch, to be removed after the current test. The patch is created with mock.patch(*args, **kwargs). @@ -826,8 +807,7 @@ def patch(self, *args, **kwargs): return start_result def assertArrayShapeStats(self, result, shape, mean, std_dev, rtol=1e-6): - """ - Assert that the result, a cube, has the provided shape and that the + """Assert that the result, a cube, has the provided shape and that the mean and standard deviation of the data array are also as provided. Thus build confidence that a cube processing operation, such as a cube.regrid, has maintained its behaviour. 
@@ -838,8 +818,7 @@ def assertArrayShapeStats(self, result, shape, mean, std_dev, rtol=1e-6): self.assertArrayAllClose(result.data.std(), std_dev, rtol=rtol) def assertDictEqual(self, lhs, rhs, msg=None): - """ - This method overrides unittest.TestCase.assertDictEqual (new in Python3.1) + """This method overrides unittest.TestCase.assertDictEqual (new in Python3.1) in order to cope with dictionary comparison where the value of a key may be a numpy array. @@ -918,8 +897,7 @@ class GraphicsTest(graphics.GraphicsTestMixin, IrisTest): def skip_data(fn): - """ - Decorator to choose whether to run tests, based on the availability of + """Decorator to choose whether to run tests, based on the availability of external data. Example usage: @@ -940,8 +918,7 @@ class MyDataTests(tests.IrisTest): def skip_gdal(fn): - """ - Decorator to choose whether to run tests, based on the availability of the + """Decorator to choose whether to run tests, based on the availability of the GDAL library. Example usage: @@ -982,8 +959,7 @@ class MyGeoTiffTests(test.IrisTest): def no_warnings(func): - """ - Provides a decorator to ensure that there are no warnings raised + """Provides a decorator to ensure that there are no warnings raised within the test, otherwise the test will fail. """ @@ -1003,8 +979,7 @@ def wrapped(self, *args, **kwargs): def env_bin_path(exe_name: AnyStr = None): - """ - Return a Path object for (an executable in) the environment bin directory. + """Return a Path object for (an executable in) the environment bin directory. Parameters ---------- diff --git a/lib/iris/tests/experimental/__init__.py b/lib/iris/tests/experimental/__init__.py index d31931720c..951fca1eae 100644 --- a/lib/iris/tests/experimental/__init__.py +++ b/lib/iris/tests/experimental/__init__.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Experimental code is tested in this package. 
+"""Experimental code is tested in this package. """ diff --git a/lib/iris/tests/experimental/regrid/__init__.py b/lib/iris/tests/experimental/regrid/__init__.py index 6837b12e91..be5871a5a6 100644 --- a/lib/iris/tests/experimental/regrid/__init__.py +++ b/lib/iris/tests/experimental/regrid/__init__.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Regridding code is tested in this package. +"""Regridding code is tested in this package. """ diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 10723f1291..b68c8f1625 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test area weighted regridding. +"""Test area weighted regridding. """ @@ -34,8 +33,7 @@ def _scaled_and_offset_grid( cube, x_scalefactor, y_scalefactor, x_offset=0.0, y_offset=0.0 ): - """ - Return a cube with a horizontal grid that is scaled and offset + """Return a cube with a horizontal grid that is scaled and offset from the horizontal grid of `src`. """ @@ -47,8 +45,7 @@ def _scaled_and_offset_grid( def _subsampled_coord(coord, subsamplefactor): - """ - Return a coordinate that is a subsampled copy of `coord`. + """Return a coordinate that is a subsampled copy of `coord`. .. note:: `subsamplefactor` must be an integer >= 1. 
@@ -68,8 +65,7 @@ def _subsampled_coord(coord, subsamplefactor): def _subsampled_grid(cube, x_subsamplefactor, y_subsamplefactor): - """ - Return a cube that has a horizontal grid that is a subsampled + """Return a cube that has a horizontal grid that is a subsampled version of the horizontal grid of `cube`. .. note:: The two subsamplefactors must both be integers >= 1. @@ -94,8 +90,7 @@ def _subsampled_grid(cube, x_subsamplefactor, y_subsamplefactor): def _resampled_coord(coord, samplefactor): - """ - Return a coordinate that has the same extent as `coord` but has + """Return a coordinate that has the same extent as `coord` but has `samplefactor` times as many points and bounds. """ @@ -115,8 +110,7 @@ def _resampled_coord(coord, samplefactor): def _resampled_grid(cube, x_samplefactor, y_samplefactor): - """ - Return a cube that has the same horizontal extent as `cube` but has + """Return a cube that has the same horizontal extent as `cube` but has a reduced (or increased) number of points (and bounds) along the X and Y dimensions. @@ -589,8 +583,7 @@ def test_circular_subset(self): @tests.skip_data def test_non_circular_subset(self): - """ - Test regridding behaviour when the source grid has circular latitude. + """Test regridding behaviour when the source grid has circular latitude. This tests the specific case when the longitude coordinate of the source grid has the `circular` attribute as `False` but otherwise spans @@ -626,8 +619,7 @@ def test_non_circular_subset(self): @tests.skip_data def test__proper_non_circular_subset(self): - """ - Test regridding behaviour when the source grid has circular latitude. + """Test regridding behaviour when the source grid has circular latitude. This tests the specific case when the longitude coordinate of the source grid does not span the full 360 degrees. 
Target cells which span diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py index e8ac3f1db4..c74bab37fb 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`. +"""Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`. """ @@ -36,8 +35,7 @@ def _make_test_cube(shape, xlims, ylims, pole_latlon=None): - """ - Create latlon cube (optionally rotated) with given xy dimensions and bounds + """Create latlon cube (optionally rotated) with given xy dimensions and bounds limit values. Produces a regular grid in source coordinates. @@ -86,8 +84,7 @@ def _cube_area_sum(cube): def _reldiff(a, b): - """ - Compute a relative-difference measure between real numbers. + """Compute a relative-difference measure between real numbers. Result is: if a == b == 0: @@ -132,8 +129,7 @@ def setUp(self): self.stock_c1_areasum = _cube_area_sum(c1) def test_simple_areas(self): - """ - Test area-conserving regrid between simple "near-square" grids. + """Test area-conserving regrid between simple "near-square" grids. Grids have overlapping areas in the same (lat-lon) coordinate system. Grids are "nearly flat" lat-lon spaces (small ranges near the equator). @@ -185,8 +181,7 @@ def test_simple_areas(self): self.assertArrayAllClose(c1to2to1_areasum, c1_areasum) def test_simple_missing_data(self): - """ - Check for missing data handling. + """Check for missing data handling. Should mask cells that either .. 
(a) go partly outside the source grid @@ -226,8 +221,7 @@ def test_simple_missing_data(self): @tests.skip_data def test_multidimensional(self): - """ - Check valid operation on a multidimensional cube. + """Check valid operation on a multidimensional cube. Calculation should repeat across multiple dimensions. Any attached orography is interpolated. @@ -527,8 +521,7 @@ def test_longitude_wraps(self): self.assertEqual(c1shifted_toc2, c1toc2) def test_polar_areas(self): - """ - Test area-conserving regrid between different grids. + """Test area-conserving regrid between different grids. Grids have overlapping areas in the same (lat-lon) coordinate system. Cells are highly non-square (near the pole). @@ -628,8 +621,7 @@ def test_fail_different_cs(self): regrid_conservative_via_esmpy(c1, c2) def test_rotated(self): - """ - Test area-weighted regrid on more complex area. + """Test area-weighted regrid on more complex area. Use two mutually rotated grids, of similar area + same dims. Only a small central region in each is non-zero, which maps entirely @@ -723,8 +715,7 @@ def test_rotated(self): self.assertArrayAllClose(c2toc1_areasum, c2_areasum, rtol=0.004) def test_missing_data_rotated(self): - """ - Check missing-data handling between different coordinate systems. + """Check missing-data handling between different coordinate systems. Regrid between mutually rotated lat/lon systems, and check results for missing data due to grid edge overlap, and source-data masking. 
diff --git a/lib/iris/tests/experimental/test_raster.py b/lib/iris/tests/experimental/test_raster.py index 3f268a2854..e6ec03cbeb 100644 --- a/lib/iris/tests/experimental/test_raster.py +++ b/lib/iris/tests/experimental/test_raster.py @@ -14,8 +14,7 @@ @tests.skip_data class TestGeoTiffExport(tests.IrisTest): def check_tiff_header(self, tiff_filename, expect_keys, expect_entries): - """ - Checks the given tiff file's metadata contains the expected keys, + """Checks the given tiff file's metadata contains the expected keys, and some matching values (not all). """ diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 0e491c6ed7..c62ac1bf0e 100755 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. # !/usr/bin/env python -""" -Contains Iris graphic testing utilities +"""Contains Iris graphic testing utilities By default, this module sets the matplotlib backend to "agg". But when this module is imported it checks ``sys.argv`` for the flag "-d". If @@ -153,8 +152,7 @@ def fully_qualify(test_id: str, repo: str) -> Dict[str, str]: def check_graphic(test_id: str, results_dir: Union[str, Path]) -> None: - """ - Check the hash of the current matplotlib figure matches the expected + """Check the hash of the current matplotlib figure matches the expected image hash for the current graphic test. To create missing image test results, set the IRIS_TEST_CREATE_MISSING @@ -263,8 +261,7 @@ def tearDown(self) -> None: def skip_plot(fn: Callable) -> Callable: - """ - Decorator to choose whether to run tests, based on the availability of the + """Decorator to choose whether to run tests, based on the availability of the matplotlib library. 
Example usage: diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index d65adf492b..53d74caf27 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. # !/usr/bin/env python -""" -Provides "diff-like" comparison of images. +"""Provides "diff-like" comparison of images. Currently relies on matplotlib for image processing so limited to PNG format. @@ -36,9 +35,7 @@ def extract_test_key(result_image_name): - """ - Extracts the name of the test which a result image refers to - """ + """Extracts the name of the test which a result image refers to""" name_match = _RESULT_NAME_PATTERN.match(str(result_image_name)) if name_match: test_key = name_match.group(1) diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index 174bc041f0..96a0e54f2b 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. # !/usr/bin/env python -""" -Updates imagerepo.json based on the baseline images +"""Updates imagerepo.json based on the baseline images """ diff --git a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py index 93606b7754..65d76011b9 100644 --- a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Integratation tests for the +"""Integratation tests for the `iris.aux_factory.OceanSigmaZFactory` class. """ diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index ae32f55e82..13110781dc 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for concatenating cubes with differing time coord epochs +"""Integration tests for concatenating cubes with differing time coord epochs using :func:`iris.util.unify_time_units`. """ diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index 6f76ab14de..63406f1ba0 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for NetCDF-UGRID file loading. +"""Integration tests for NetCDF-UGRID file loading. todo: fold these tests into netcdf tests when experimental.ugrid is folded into standard behaviour. diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py b/lib/iris/tests/integration/experimental/test_ugrid_save.py index 02c4f3f852..cbff1d767f 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for NetCDF-UGRID file saving. 
+"""Integration tests for NetCDF-UGRID file saving. """ # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/merge/test_merge.py b/lib/iris/tests/integration/merge/test_merge.py index 87b52fd85d..4e8562cb60 100644 --- a/lib/iris/tests/integration/merge/test_merge.py +++ b/lib/iris/tests/integration/merge/test_merge.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for merging cubes. +"""Integration tests for merging cubes. """ diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py index f711e68820..6e1026b29f 100644 --- a/lib/iris/tests/integration/netcdf/test__dask_locks.py +++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` package. +"""Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` package. Note: these integration tests replace any unit testing of this module, due to its total dependence on Dask, and even on Dask's implementation details rather than supported diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index b7b21911e5..55ecf2e2ef 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -203,8 +203,7 @@ def test_save_datum(self): class TestLoadMinimalGeostationary(tests.IrisTest): - """ - Check we can load data with a geostationary grid-mapping, even when the + """Check we can load data with a geostationary grid-mapping, even when the 'false-easting' and 'false_northing' properties are missing. 
""" diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index c294891cb6..06d04e957d 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for delayed saving. +"""Integration tests for delayed saving. """ import re import warnings @@ -61,8 +60,7 @@ def make_testcube( cube = realistic_4d() def fix_array(array): - """ - Make a new, custom array to replace the provided cube/coord data. + """Make a new, custom array to replace the provided cube/coord data. Optionally provide default-fill-value collisions, and/or replace with lazy content. """ diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py index 916cbf67e1..b2319364c2 100644 --- a/lib/iris/tests/integration/netcdf/test_thread_safety.py +++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests covering thread safety during loading/saving netcdf files. +"""Integration tests covering thread safety during loading/saving netcdf files. These tests are intended to catch non-thread-safe behaviour by producing CI 'irregularities' that are noticed and investigated. They cannot reliably @@ -100,8 +99,7 @@ def test_stream_multisource(get_cubes_from_netcdf, save_common): def test_stream_multisource__manychunks( tiny_chunks, get_cubes_from_netcdf, save_common ): - """ - As above, but with many more small chunks. + """As above, but with many more small chunks. 
As this previously showed additional, sporadic problems which only emerge (statistically) with larger numbers of chunks. @@ -113,8 +111,7 @@ def test_stream_multisource__manychunks( def test_comparison(get_cubes_from_netcdf): - """ - Comparing multiple loaded files forces co-realisation. + """Comparing multiple loaded files forces co-realisation. See :func:`iris._lazy_data._co_realise_lazy_arrays` . """ diff --git a/lib/iris/tests/integration/plot/test_animate.py b/lib/iris/tests/integration/plot/test_animate.py index 1354ef4289..53a6b38797 100644 --- a/lib/iris/tests/integration/plot/test_animate.py +++ b/lib/iris/tests/integration/plot/test_animate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for :func:`iris.plot.animate`. +"""Integration tests for :func:`iris.plot.animate`. """ diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index 82b406abbf..9aa856934c 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test interaction between :mod:`iris.plot` and +"""Test interaction between :mod:`iris.plot` and :func:`matplotlib.pyplot.colorbar` """ diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index 2b3a59d093..dbe67efd35 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test plot of time coord with non-standard calendar. 
+"""Test plot of time coord with non-standard calendar. """ diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py index b6d12d805a..5a83ac5d89 100644 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ b/lib/iris/tests/integration/plot/test_nzdateline.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test set up of limited area map extents which bridge the date line. +"""Test set up of limited area map extents which bridge the date line. """ diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py index 673f8817d6..dafddd064d 100644 --- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py +++ b/lib/iris/tests/integration/plot/test_plot_2d_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test plots with two dimensional coordinates. +"""Test plots with two dimensional coordinates. """ diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 4c30753dee..08170f6f89 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test some key usages of :func:`iris.plot.quiver`. +"""Test some key usages of :func:`iris.plot.quiver`. """ @@ -28,8 +27,7 @@ @tests.skip_plot class MixinVectorPlotCases: - """ - Test examples mixin, used by separate barb, quiver + streamplot classes. + """Test examples mixin, used by separate barb, quiver + streamplot classes. 
NOTE: at present for barb and quiver only, as streamplot does not support arbitrary coordinates. diff --git a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py index ba14327c9b..dcf61a947f 100644 --- a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py +++ b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for loading and saving netcdf file attributes. +"""Integration tests for loading and saving netcdf file attributes. Notes: (1) attributes in netCDF files can be either "global attributes", or variable @@ -98,8 +97,7 @@ def check_captured_warnings( captured_warnings: List[warnings.WarningMessage], allow_possible_legacy_warning: bool = False, ): - """ - Compare captured warning messages with a list of regexp-matches. + """Compare captured warning messages with a list of regexp-matches. We allow them to occur in any order, and replace each actual result in the list with its matching regexp, if any, as this makes failure results much easier to @@ -150,8 +148,7 @@ def check_captured_warnings( class MixinAttrsTesting: @staticmethod def _calling_testname(): - """ - Search up the callstack for a function named "test_*", and return the name for + """Search up the callstack for a function named "test_*", and return the name for use as a test identifier. Idea borrowed from :meth:`iris.tests.IrisTest.result_path`. @@ -175,8 +172,7 @@ def _calling_testname(): @pytest.fixture(autouse=True) def make_tempdir(self, tmp_path_factory): - """ - Automatically-run fixture to activate the 'tmp_path_factory' fixture on *every* + """Automatically-run fixture to activate the 'tmp_path_factory' fixture on *every* test: Make a directory for temporary files, and record it on the test instance. N.B. 
"tmp_path_factory" is a standard PyTest fixture, which provides a dirpath @@ -214,8 +210,7 @@ def create_testcase_files_or_cubes( var_values_file2: Union[None, str, dict] = None, cubes: bool = False, ): - """ - Create temporary input netcdf files, or cubes, with specific content. + """Create temporary input netcdf files, or cubes, with specific content. Creates a temporary netcdf test file (or two) with the given global and variable-local attributes. Or build cubes, similarly. @@ -299,8 +294,7 @@ def run_testcase( values: Union[List, List[List]], create_cubes_or_files: str = "files", ) -> None: - """ - Create testcase inputs (files or cubes) with specified attributes. + """Create testcase inputs (files or cubes) with specified attributes. Parameters ---------- @@ -365,8 +359,7 @@ def fetch_results( cubes: Iterable[Cube] = None, oldstyle_combined: bool = False, ): - """ - Return testcase results from an output file or cubes in a standardised form. + """Return testcase results from an output file or cubes in a standardised form. Unpick the global+local values of the attribute ``self.attrname``, resulting from a test operation. @@ -657,8 +650,7 @@ def matrix_results(): class TestRoundtrip(MixinAttrsTesting): - """ - Test handling of attributes in roundtrip netcdf-iris-netcdf. + """Test handling of attributes in roundtrip netcdf-iris-netcdf. This behaviour should be (almost) unchanged by the adoption of split-attribute handling. @@ -680,8 +672,7 @@ def do_split(self, request): return do_split def run_roundtrip_testcase(self, attr_name, values): - """ - Initialise the testcase from the passed-in controls, configure the input + """Initialise the testcase from the passed-in controls, configure the input files and run a save-load roundtrip to produce the output file. 
The name of the attribute, and the input and output temporary filepaths are @@ -708,8 +699,7 @@ def run_roundtrip_testcase(self, attr_name, values): self.captured_warnings = captured_warnings def check_roundtrip_results(self, expected, expected_warnings=None): - """ - Run checks on the generated output file. + """Run checks on the generated output file. The counterpart to :meth:`run_roundtrip_testcase`, with similar arguments. Check existence (or not) of a global attribute, and a number of local @@ -1049,8 +1039,7 @@ def test_roundtrip_matrix(self, testcase, attrname, matrix_results, do_split): class TestLoad(MixinAttrsTesting): - """ - Test loading of file attributes into Iris cube attribute dictionaries. + """Test loading of file attributes into Iris cube attribute dictionaries. Tests loading of various combinations to cube dictionaries, treated as a single combined result (i.e. not split). This behaviour should be (almost) @@ -1321,10 +1310,7 @@ def test_load_matrix(self, testcase, attrname, matrix_results, resultstyle): class TestSave(MixinAttrsTesting): - """ - Test saving from cube attributes dictionary (various categories) into files. - - """ + """Test saving from cube attributes dictionary (various categories) into files.""" # Parametrise all tests over split/unsplit saving. @pytest.fixture(params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True) @@ -1354,8 +1340,7 @@ def run_save_testcase(self, attr_name: str, values: list): self.captured_warnings = captured_warnings def run_save_testcase_legacytype(self, attr_name: str, values: list): - """ - Legacy-type means : before cubes had split attributes. + """Legacy-type means : before cubes had split attributes. This just means we have only one "set" of cubes, with ***no*** distinct global attribute. 
diff --git a/lib/iris/tests/integration/test_regrid_equivalence.py b/lib/iris/tests/integration/test_regrid_equivalence.py index 331a12a8ac..bedbe54e17 100644 --- a/lib/iris/tests/integration/test_regrid_equivalence.py +++ b/lib/iris/tests/integration/test_regrid_equivalence.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Tests to check the validity of replacing +"""Tests to check the validity of replacing "iris.analysis._interpolate.regrid`('nearest')" with "iris.cube.Cube.regrid(scheme=iris.analysis.Nearest())". diff --git a/lib/iris/tests/integration/um/test_fieldsfile.py b/lib/iris/tests/integration/um/test_fieldsfile.py index 2aff7a2989..18c28ee1c7 100644 --- a/lib/iris/tests/integration/um/test_fieldsfile.py +++ b/lib/iris/tests/integration/um/test_fieldsfile.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test the fast loading of structured Fieldsfiles. +"""Test the fast loading of structured Fieldsfiles. """ diff --git a/lib/iris/tests/pp.py b/lib/iris/tests/pp.py index a0265f9bbf..b0af72d5ee 100644 --- a/lib/iris/tests/pp.py +++ b/lib/iris/tests/pp.py @@ -10,10 +10,7 @@ class PPTest: - """ - A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest. - - """ + """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest.""" @contextlib.contextmanager def cube_save_test( @@ -23,8 +20,7 @@ def cube_save_test( reference_pp_path=None, **kwargs, ): - """ - A context manager for testing the saving of Cubes to PP files. + """A context manager for testing the saving of Cubes to PP files. 
Args: diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index 5979d1f0c7..ca5adb21fc 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -A collection of routines which create standard Cubes/files for test purposes. +"""A collection of routines which create standard Cubes/files for test purposes. """ import iris.tests as tests # isort:skip @@ -29,8 +28,7 @@ def lat_lon_cube(): - """ - Returns a cube with a latitude and longitude suitable for testing + """Returns a cube with a latitude and longitude suitable for testing saving to PP/NetCDF etc. """ @@ -54,8 +52,7 @@ def lat_lon_cube(): def global_pp(): - """ - Returns a two-dimensional cube derived from PP/aPPglob1/global.pp. + """Returns a two-dimensional cube derived from PP/aPPglob1/global.pp. The standard_name and unit attributes are added to compensate for the broken STASH encoding in that file. @@ -79,8 +76,7 @@ def simple_pp(): def simple_1d(with_bounds=True): - """ - Returns an abstract, one-dimensional cube. + """Returns an abstract, one-dimensional cube. >>> print(simple_1d()) thingness (foo: 11) @@ -109,8 +105,7 @@ def simple_1d(with_bounds=True): def simple_2d(with_bounds=True): - """ - Returns an abstract, two-dimensional, optionally bounded, cube. + """Returns an abstract, two-dimensional, optionally bounded, cube. >>> print(simple_2d()) thingness (bar: 3; foo: 4) @@ -151,8 +146,7 @@ def simple_2d(with_bounds=True): def simple_2d_w_multidim_coords(with_bounds=True): - """ - Returns an abstract, two-dimensional, optionally bounded, cube. + """Returns an abstract, two-dimensional, optionally bounded, cube. 
>>> print(simple_2d_w_multidim_coords()) thingness (*ANONYMOUS*: 3; *ANONYMOUS*: 4) @@ -173,8 +167,7 @@ def simple_2d_w_multidim_coords(with_bounds=True): def simple_3d_w_multidim_coords(with_bounds=True): - """ - Returns an abstract, two-dimensional, optionally bounded, cube. + """Returns an abstract, two-dimensional, optionally bounded, cube. >>> print(simple_3d_w_multidim_coords()) thingness (wibble: 2; *ANONYMOUS*: 3; *ANONYMOUS*: 4) @@ -251,8 +244,7 @@ def simple_3d_w_multidim_coords(with_bounds=True): def simple_3d(): - """ - Returns an abstract three dimensional cube. + """Returns an abstract three dimensional cube. >>> print(simple_3d()) thingness / (1) (wibble: 2; latitude: 3; longitude: 4) @@ -291,8 +283,7 @@ def simple_3d(): def simple_3d_mask(): - """ - Returns an abstract three dimensional cube that has data masked. + """Returns an abstract three dimensional cube that has data masked. >>> print(simple_3d_mask()) thingness / (1) (wibble: 2; latitude: 3; longitude: 4) @@ -318,8 +309,7 @@ def simple_3d_mask(): def track_1d(duplicate_x=False): - """ - Returns a one-dimensional track through two-dimensional space. + """Returns a one-dimensional track through two-dimensional space. >>> print(track_1d()) air_temperature (y, x: 11) @@ -396,8 +386,7 @@ def simple_2d_w_multidim_and_scalars(): def simple_2d_w_cell_measure_ancil_var(): - """ - Returns a two dimensional cube with a CellMeasure and AncillaryVariable. + """Returns a two dimensional cube with a CellMeasure and AncillaryVariable. >>> print(simple_2d_w_cell_measure_ancil_var()) thingness / (1) (bar: 3; foo: 4) @@ -425,8 +414,7 @@ def simple_2d_w_cell_measure_ancil_var(): def hybrid_height(): - """ - Returns a two-dimensional (Z, X), hybrid-height cube. + """Returns a two-dimensional (Z, X), hybrid-height cube. >>> print(hybrid_height()) TODO: Update! @@ -527,8 +515,7 @@ def simple_4d_with_hybrid_height(): def realistic_3d(): - """ - Returns a realistic 3d cube. + """Returns a realistic 3d cube. 
>>> print(repr(realistic_3d())) >> print(repr(realistic_4d())) >> print(repr(realistic_4d())) local/global). diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index be7d52c716..bdf48029e4 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. +"""Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. """ diff --git a/lib/iris/tests/unit/common/metadata/test_hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py index 9a16d9252b..035e051440 100644 --- a/lib/iris/tests/unit/common/metadata/test_hexdigest.py +++ b/lib/iris/tests/unit/common/metadata/test_hexdigest.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.common.metadata.hexdigest`. +"""Unit tests for the :func:`iris.common.metadata.hexdigest`. """ diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index d00bf95b2b..c77d0dc357 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.common.metadata_filter`. +"""Unit tests for the :func:`iris.common.metadata_filter`. 
""" diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index a700585aa2..3eda14e635 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. +"""Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. """ diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 650524b0a8..2e858a74bf 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. +"""Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. """ diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py index d29a120f35..85d4cfe9a3 100644 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. +"""Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. 
""" diff --git a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py index 634eae4cf3..d7b929eeb3 100644 --- a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py +++ b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. +"""Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. """ diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 7c3dc0d4ae..96b12f149a 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.common.resolve.Resolve`. +"""Unit tests for the :class:`iris.common.resolve.Resolve`. """ diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index 7b65738b15..05a0f3e474 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test coordinate categorisation function add_hour. +"""Test coordinate categorisation function add_hour. 
""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py index 90aef6e59a..2fc3db9b05 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py +++ b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test the coordinate categorisation functions. +"""Test the coordinate categorisation functions. """ import warnings diff --git a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py index 26aa79ac47..768cca70b7 100644 --- a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class. +"""Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class. """ diff --git a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py index b2d0c576bb..a1d978811d 100644 --- a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class. +"""Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class. 
""" diff --git a/lib/iris/tests/unit/coords/__init__.py b/lib/iris/tests/unit/coords/__init__.py index 2143868847..d644965526 100644 --- a/lib/iris/tests/unit/coords/__init__.py +++ b/lib/iris/tests/unit/coords/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :mod:`iris.coords` module. +"""Unit tests for the :mod:`iris.coords` module. Provides test methods and classes common to :class:`~iris.tests.unit.coords.test_AuxCoord` and diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py index abdf4867f7..49d3507880 100644 --- a/lib/iris/tests/unit/coords/test_AuxCoord.py +++ b/lib/iris/tests/unit/coords/test_AuxCoord.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.coords.AuxCoord` class. +"""Unit tests for the :class:`iris.coords.AuxCoord` class. Note: a lot of these methods are actually defined by the :class:`Coord` class, but can only be tested on concrete instances (DimCoord or AuxCoord). 
diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index 2b9f808404..2eee4c0def 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -140,8 +140,7 @@ def test_PartialDateTime_other(self): class Test_contains_point(tests.IrisTest): - """ - Test that contains_point works for combinations of datetime, + """Test that contains_point works for combinations of datetime, cf.datatime, and PartialDateTime objects""" def test_datetime_PartialDateTime_point(self): @@ -236,8 +235,7 @@ def test_cftime_360_day_cftime_360day_point(self): class Test_numpy_comparison(tests.IrisTest): - """ - Unit tests to check that the results of comparisons with numpy types can be + """Unit tests to check that the results of comparisons with numpy types can be used as truth values.""" def test_cell_lhs(self): diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py index 274606510a..58a10aff50 100644 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.coords.CellMethod`. +"""Unit tests for the :class:`iris.coords.CellMethod`. """ # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py index 0b1d123563..aac5defd23 100644 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ b/lib/iris/tests/unit/coords/test_DimCoord.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.coords.DimCoord` class. +"""Unit tests for the :class:`iris.coords.DimCoord` class. 
Note: a lot of these methods are actually defined by the :class:`Coord` class, but can only be tested on concrete instances (DimCoord or AuxCoord). diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 1cdd6dfe5c..cf850ce907 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -37,8 +37,7 @@ def test(self): class Mixin__string_representations: - """ - Common testcode for generic `__str__`, `__repr__` and `summary` methods. + """Common testcode for generic `__str__`, `__repr__` and `summary` methods. Effectively, __str__ and __repr__ are thin wrappers around `summary`. These are used by all the subclasses : notably Coord/DimCoord/AuxCoord, @@ -60,8 +59,7 @@ class Mixin__string_representations: """ def repr_str_strings(self, dm, linewidth=55): - """ - Return a simple combination of repr and str printouts. + """Return a simple combination of repr and str printouts. N.B. we control linewidth to make the outputs easier to compare. """ @@ -165,8 +163,7 @@ def sample_coord( return coord def coord_representations(self, *args, **kwargs): - """ - Create a test coord and return its string representations. + """Create a test coord and return its string representations. Pass args+kwargs to 'sample_coord' and return the 'repr_str_strings'. @@ -175,8 +172,7 @@ def coord_representations(self, *args, **kwargs): return self.repr_str_strings(coord) def assertLines(self, list_of_expected_lines, string_result): - """ - Assert equality between a result and expected output lines. + """Assert equality between a result and expected output lines. For convenience, the 'expected lines' are joined with a '\\n', because a list of strings is nicer to construct in code. 
@@ -187,8 +183,7 @@ def assertLines(self, list_of_expected_lines, string_result): class Test__print_common(Mixin__string_representations, tests.IrisTest): - """ - Test aspects of __str__ and __repr__ output common to all + """Test aspects of __str__ and __repr__ output common to all _DimensionalMetadata instances. I.E. those from CFVariableMixin, plus values array (data-manager). @@ -683,8 +678,7 @@ def test_integers_masked_long(self): class Test__print_Coord(Mixin__string_representations, tests.IrisTest): - """ - Test Coord-specific aspects of __str__ and __repr__ output. + """Test Coord-specific aspects of __str__ and __repr__ output. Aspects : * DimCoord / AuxCoord @@ -756,8 +750,7 @@ def test_circular(self): class Test__print_noncoord(Mixin__string_representations, tests.IrisTest): - """ - Limited testing of other _DimensionalMetadata subclasses. + """Limited testing of other _DimensionalMetadata subclasses. * AncillaryVariable * CellMeasure @@ -911,9 +904,7 @@ def test_meshcoord(self): class Test_summary(Mixin__string_representations, tests.IrisTest): - """ - Test the controls of the 'summary' method. - """ + """Test the controls of the 'summary' method.""" def test_shorten(self): coord = self.sample_coord() diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 0829b8ccf6..85335d9945 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -956,14 +956,12 @@ def test_weights_dim_coord(self): class Test_slices_dim_order(tests.IrisTest): - """ - This class tests the capability of iris.cube.Cube.slices(), including its + """This class tests the capability of iris.cube.Cube.slices(), including its ability to correctly re-order the dimensions. """ def setUp(self): - """ - setup a 4D iris cube, each dimension is length 1. + """setup a 4D iris cube, each dimension is length 1. 
The dimensions are; dim1: time dim2: height @@ -978,8 +976,7 @@ def setUp(self): @staticmethod def expected_cube_setup(dim1name, dim2name, dim3name): - """ - input: + """input: ------ dim1name: str name of the first dimension coordinate @@ -999,8 +996,7 @@ def expected_cube_setup(dim1name, dim2name, dim3name): return cube def check_order(self, dim1, dim2, dim3, dim_to_remove): - """ - does two things: + """does two things: (1) slices the 4D cube in dim1, dim2, dim3 (and removes the scalar coordinate) and (2) sets up a 3D cube with dim1, dim2, dim3. @@ -1951,8 +1947,7 @@ def test__lazy(self): def _add_test_meshcube(self, nomesh=False, n_z=2, **meshcoord_kwargs): - """ - Common setup action : Create a standard mesh test cube with a variety of coords, and save the cube and various of + """Common setup action : Create a standard mesh test cube with a variety of coords, and save the cube and various of its components as properties of the 'self' TestCase. """ @@ -1973,8 +1968,7 @@ def _add_test_meshcube(self, nomesh=False, n_z=2, **meshcoord_kwargs): class Test_coords__mesh_coords(tests.IrisTest): - """ - Checking *only* the new "mesh_coords" keyword of the coord/coords methods. + """Checking *only* the new "mesh_coords" keyword of the coord/coords methods. This is *not* attached to the existing tests for this area, as they are very old and patchy legacy tests. See: iris.tests.test_cdm.TestQueryCoord. @@ -1986,8 +1980,7 @@ def setUp(self): _add_test_meshcube(self) def _assert_lists_equal(self, items_a, items_b): - """ - Check that two lists of coords, cubes etc contain the same things. + """Check that two lists of coords, cubes etc contain the same things. Lists must contain the same items, including any repeats, but can be in a different order. 
@@ -2103,8 +2096,7 @@ def test_alternate(self): class Test__init__mesh(tests.IrisTest): - """ - Test that creation with mesh-coords functions, and prevents a cube having + """Test that creation with mesh-coords functions, and prevents a cube having incompatible mesh-coords. """ @@ -2220,8 +2212,7 @@ def test_fail_meshcoords_different_dims(self): class Test__add_aux_coord__mesh(tests.IrisTest): - """ - Test that "Cube.add_aux_coord" functions with a mesh-coord, and prevents a + """Test that "Cube.add_aux_coord" functions with a mesh-coord, and prevents a cube having incompatible mesh-coords. """ @@ -2297,10 +2288,7 @@ def test_fail_different_dimension(self): class Test__add_dim_coord__mesh(tests.IrisTest): - """ - Test that "Cube.add_dim_coord" cannot work with a mesh-coord. - - """ + """Test that "Cube.add_dim_coord" cannot work with a mesh-coord.""" def test(self): # Create a mesh with only 2 faces, so coord *can't* be non-monotonic. @@ -2312,8 +2300,7 @@ def test(self): class Test__eq__mesh(tests.IrisTest): - """ - Check that cubes with meshes support == as expected. + """Check that cubes with meshes support == as expected. Note: there is no special code for this in iris.cube.Cube : it is provided by the coord comparisons. @@ -2992,8 +2979,7 @@ def simplecube(): class Test__dimensional_metadata: - """ - Tests for the "Cube._dimensional_data" method. + """Tests for the "Cube._dimensional_data" method. NOTE: test could all be static methods, but that adds a line to each definition. """ @@ -3054,8 +3040,7 @@ def test_two_with_same_name_specify_instance(self, simplecube): class TestReprs: - """ - Confirm that str(cube), repr(cube) and cube.summary() work by creating a fresh + """Confirm that str(cube), repr(cube) and cube.summary() work by creating a fresh :class:`iris._representation.cube_printout.CubePrinter` object, and using it in the expected ways. 
@@ -3112,8 +3097,7 @@ def test_summary_effects(self, simplecube, patched_cubeprinter): class TestHtmlRepr: - """ - Confirm that Cube._repr_html_() creates a fresh + """Confirm that Cube._repr_html_() creates a fresh :class:`iris.experimental.representation.CubeRepresentation` object, and uses it in the expected way. diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py index ccf9691e78..50de4541e0 100644 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -20,8 +20,7 @@ def sample_attrs() -> CubeAttrsDict: def check_content(attrs, locals=None, globals=None, matches=None): - """ - Check a CubeAttrsDict for expected properties. + """Check a CubeAttrsDict for expected properties. Its ".globals" and ".locals" must match 'locals' and 'globals' args -- except that, if 'matches' is provided, it is a CubeAttrsDict, whose diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 1f830c3398..72ca7d2306 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -507,8 +507,7 @@ def test_multi_cubes(self): class ExtractCubesSingleConstraintMixin(ExtractCubesMixin): - """ - Common code for testing extract_cubes with a single constraint. + """Common code for testing extract_cubes with a single constraint. Generalised, so that we can do the same tests for a "bare" constraint, and a list containing a single [constraint]. @@ -571,9 +570,7 @@ class Test_extract_cubes__list_single_constraint( class Test_extract_cubes__multi_constraints(ExtractCubesMixin, tests.IrisTest): - """ - Testing when the 'constraints' arg is a list of multiple constraints. - """ + """Testing when the 'constraints' arg is a list of multiple constraints.""" def test_empty(self): # Always fails. 
@@ -688,8 +685,7 @@ def test_copy(self): class TestHtmlRepr: - """ - Confirm that Cubelist._repr_html_() creates a fresh + """Confirm that Cubelist._repr_html_() creates a fresh :class:`iris.experimental.representation.CubeListRepresentation` object, and uses it in the expected way. diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 517e684d46..40fa42e976 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -618,8 +618,7 @@ def get_result( return out_cube def test_basic(self): - """ - Check the least complicated version works (set climatological, set + """Check the least complicated version works (set climatological, set points correctly). """ result = self.get_result() @@ -636,8 +635,7 @@ def test_basic(self): self.assertFalse(categorised_coord.climatological) def test_2d_other_coord(self): - """ - Check that we can handle aggregation applying to a 2d AuxCoord that + """Check that we can handle aggregation applying to a 2d AuxCoord that covers the aggregation dimension and another one. """ result = self.get_result(partially_aligned=True) @@ -658,8 +656,7 @@ def test_2d_other_coord(self): self.assertFalse(part_aligned_coord.climatological) def test_2d_timelike_other_coord(self): - """ - Check that we can handle aggregation applying to a 2d AuxCoord that + """Check that we can handle aggregation applying to a 2d AuxCoord that covers the aggregation dimension and another one. """ result = self.get_result( @@ -680,9 +677,7 @@ def test_2d_timelike_other_coord(self): self.assertTrue(part_aligned_coord.climatological) def test_transposed(self): - """ - Check that we can handle the axis of aggregation being a different one. 
- """ + """Check that we can handle the axis of aggregation being a different one.""" result = self.get_result(transpose=True) aligned_coord = result.coord("aligned") @@ -707,9 +702,7 @@ def test_bounded(self): self.assertTrue(aligned_coord.climatological) def test_multiple_agg_coords(self): - """ - Check that we can aggregate on multiple coords on the same axis. - """ + """Check that we can aggregate on multiple coords on the same axis.""" result = self.get_result(second_categorised=True) aligned_coord = result.coord("aligned") @@ -731,8 +724,7 @@ def test_multiple_agg_coords(self): self.assertFalse(categorised_coord2.climatological) def test_non_climatological_units(self): - """ - Check that the failure to set the climatological flag on an incompatible + """Check that the failure to set the climatological flag on an incompatible unit is handled quietly. """ result = self.get_result(invalid_units=True) @@ -743,8 +735,7 @@ def test_non_climatological_units(self): self.assertFalse(aligned_coord.climatological) def test_clim_in_clim_op(self): - """ - Check the least complicated version works (set climatological, set + """Check the least complicated version works (set climatological, set points correctly). For the input coordinate to be climatological, it must have bounds """ @@ -763,8 +754,7 @@ def test_clim_in_clim_op(self): self.assertFalse(categorised_coord.climatological) def test_clim_in_no_clim_op(self): - """ - Check the least complicated version works (set climatological, set + """Check the least complicated version works (set climatological, set points correctly). For the input coordinate to be climatological, it must have bounds. """ @@ -797,8 +787,7 @@ def setUp(self): self.aggregator = iris.analysis.MEAN def test_grouped_dim(self): - """ - Check that derived coordinates are maintained when the coordinates they + """Check that derived coordinates are maintained when the coordinates they derive from are aggregated. 
""" result = self.cube.aggregated_by( @@ -817,8 +806,7 @@ def test_grouped_dim(self): assert np.array_equal(expected_bounds, result.coord("altitude").bounds) def test_ungrouped_dim(self): - """ - Check that derived coordinates are preserved when aggregating along a + """Check that derived coordinates are preserved when aggregating along a different axis. """ result = self.cube.aggregated_by( diff --git a/lib/iris/tests/unit/data_manager/test_DataManager.py b/lib/iris/tests/unit/data_manager/test_DataManager.py index 1b91e256f4..f35c2fcfcb 100644 --- a/lib/iris/tests/unit/data_manager/test_DataManager.py +++ b/lib/iris/tests/unit/data_manager/test_DataManager.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris._data_manager.DataManager`. +"""Unit tests for the :class:`iris._data_manager.DataManager`. """ diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 3cec1f8569..8459bdd6d8 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function +"""Test function :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`. 
""" diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py index f9397da219..ca4d89598c 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function +"""Test function :func:`iris.experimental.regrid.regrid_weighted_curvilinear_to_rectilinear`. """ diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index 7f425d371d..f587019f3a 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.experimental.stratify.relevel` function. +"""Unit tests for the :func:`iris.experimental.stratify.relevel` function. """ diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index 79eb9aac1e..5f613840a3 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable` class. 
+"""Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable` class. todo: fold these tests into cf tests when experimental.ugrid is folded into standard behaviour. diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index 1dd45c323b..dcddfa08b8 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridConnectivityVariable` class. +"""Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridConnectivityVariable` class. todo: fold these tests into cf tests when experimental.ugrid is folded into standard behaviour. diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py index a252618e85..9577955f97 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. +"""Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. todo: fold these tests into cf tests when experimental.ugrid is folded into standard behaviour. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index 10212f5ae5..ccefe01b3c 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridMeshVariable` class. +"""Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridMeshVariable` class. todo: fold these tests into cf tests when experimental.ugrid is folded into standard behaviour. diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py index d2dd32f1e3..cb2ae41d72 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. +"""Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. todo: fold these tests into cf tests when experimental.ugrid is folded into standard behaviour. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py index 5c33b27d3e..7ccdeee08b 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for the :class:`iris.experimental.ugrid.load.ParseUgridOnLoad` class. +"""Unit tests for the :class:`iris.experimental.ugrid.load.ParseUgridOnLoad` class. todo: remove this module when experimental.ugrid is folded into standard behaviour. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py index 010ecddd09..09e15915db 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function. +"""Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function. """ # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index 02b835bbfa..d0cfdd4309 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function. +"""Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function. 
""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 37bd49d346..bc0daf14bc 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -927,8 +927,7 @@ def test_node_dimension_set(self): self.assertEqual("foo", self.mesh.node_dimension) def test_remove_connectivities(self): - """ - Test that remove() mimics the connectivities() method correctly, + """Test that remove() mimics the connectivities() method correctly, and prevents removal of mandatory connectivities. """ diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index 2bd8e5ddc4..bf5500c7ed 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`. +"""Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`. """ # Import iris.tests first so that some things can be initialised before @@ -104,8 +103,7 @@ def test_set_climatological(self): class Test__inherited_properties(tests.IrisTest): - """ - Check the settability and effect on equality of the common BaseMetadata + """Check the settability and effect on equality of the common BaseMetadata properties inherited from Coord : i.e. names/units/attributes. Though copied from the mesh at creation, they are also changeable. @@ -515,8 +513,7 @@ def test_basic(self): class Test_MeshCoord__dataviews(tests.IrisTest): - """ - Fuller testing of points and bounds calculations and behaviour. 
+ """Fuller testing of points and bounds calculations and behaviour. Including connectivity missing-points (non-square faces). """ diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index d374a98144..4d12a73a9e 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`. +"""Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`. """ # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index 57218cd299..b0d4d70cbe 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`. +"""Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`. 
""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py index 5a4befa9ce..dbf1446b52 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`. +"""Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`. """ # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py index bc7e3e70db..98e918c342 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`. +"""Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`. 
""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py index b42eed9f34..9fcb775433 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`. +"""Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`. """ # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index 4b76ac5d48..81e6c8cedf 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -43,8 +43,7 @@ def _test_for_coord( def assertCoordsAndDimsListsMatch( self, coords_and_dims_got, coords_and_dims_expected ): - """ - Check that coords_and_dims lists are equivalent. + """Check that coords_and_dims lists are equivalent. The arguments are lists of pairs of (coordinate, dimensions). The elements are compared one-to-one, by coordinate name (so the order diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index ae6a87de3f..0e6805d104 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the `iris.fileformats.cf.CFReader` class. +"""Unit tests for the `iris.fileformats.cf.CFReader` class. 
""" diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index da3ac01f66..2c19bdc12e 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -69,8 +69,7 @@ def test_call_structure(self, _FFHeader): class Test__extract_field__LBC_format(tests.IrisTest): @contextlib.contextmanager def mock_for_extract_field(self, fields, x=None, y=None): - """ - A context manager to ensure FF2PP._extract_field gets a field + """A context manager to ensure FF2PP._extract_field gets a field instance looking like the next one in the "fields" iterable from the "make_pp_field" call. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index 26e6208db1..2d9faa90e5 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`. +"""Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`. """ diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py index 33f18d5a7a..0a020e6142 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`. +"""Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`. """ diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py index 20ef79cec3..fb28ad911b 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`. +"""Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`. """ diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py index ea09d40acb..86729ef024 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.analysis.name_loaders._cf_height_from_name` +"""Unit tests for the :class:`iris.analysis.name_loaders._cf_height_from_name` function. 
""" diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py index ffaf6957ce..f41c52c105 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`. +"""Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py index a29f504b7e..0cdb3690a0 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module +"""Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules` . """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index efb5e55be8..8107a869f4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. +"""Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. 
""" from pathlib import Path @@ -35,8 +34,7 @@ class Mixin__nc_load_actions: - """ - Class to make testcases for rules or actions code, and check results. + """Class to make testcases for rules or actions code, and check results. Defines standard setUpClass/tearDownClass methods, to create a temporary directory for intermediate files. @@ -69,8 +67,7 @@ def tearDownClass(cls): shutil.rmtree(cls.temp_dirpath) def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): - """ - Load the 'phenom' data variable in a CDL testcase, as a cube. + """Load the 'phenom' data variable in a CDL testcase, as a cube. Using ncgen, CFReader and the _load_cube call. @@ -120,8 +117,7 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): return cube def run_testcase(self, warning_regex=None, **testcase_kwargs): - """ - Run a testcase with chosen options, returning a test cube. + """Run a testcase with chosen options, returning a test cube. The kwargs apply to the '_make_testcase_cdl' method. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 455cb4f003..72e9448255 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the engine.activate() call within the +"""Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. Here, *specifically* testcases relating to grid-mappings and dim-coords. @@ -34,8 +33,7 @@ def _make_testcase_cdl( xco_is_dim=True, yco_is_dim=True, ): - """ - Create a CDL string for a testcase. + """Create a CDL string for a testcase. This is the "master" routine for creating all our testcases. 
Kwarg options modify a simple default testcase with a latlon grid. @@ -257,8 +255,7 @@ def check_result( xco_stdname=True, yco_stdname=True, ): - """ - Check key properties of a result cube. + """Check key properties of a result cube. Various options control the expected things which are tested. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index ad310e9450..5af6d6fa1d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the engine.activate() call within the +"""Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. Test rules activation relating to hybrid vertical coordinates. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index 80908b2abd..e6a2c203b7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the engine.activate() call within the +"""Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. 
Tests for rules behaviour in identifying latitude/longitude dim-coords, both diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index 3d0a0017c8..a7d5a10e73 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the engine.activate() call within the +"""Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. Tests for rules activation relating to some isolated aspects : diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index efd67e949a..b3c2fe9b0b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the engine.activate() call within the +"""Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. Tests for rules activation relating to 'time' and 'time_period' coords. @@ -160,8 +159,7 @@ def _make_testcase_cdl( return cdl_string def check_result(self, cube, time_is="dim", period_is="missing"): - """ - Check presence of expected dim/aux-coords in the result cube. + """Check presence of expected dim/aux-coords in the result cube. Both of 'time_is' and 'period_is' can take values 'dim', 'aux' or 'missing'. 
@@ -220,9 +218,7 @@ class Mixin__singlecoord__tests(Mixin__timecoords__common): which = None def run_testcase(self, coord_dim_name=None, **opts): - """ - Specialise 'run_testcase' for single-coord 'time' or 'period' testing. - """ + """Specialise 'run_testcase' for single-coord 'time' or 'period' testing.""" which = self.which assert which in ("time", "period") @@ -253,9 +249,7 @@ def run_testcase(self, coord_dim_name=None, **opts): return result def check_result(self, cube, coord_is="dim"): - """ - Specialise 'check_result' for single-coord 'time' or 'period' testing. - """ + """Specialise 'check_result' for single-coord 'time' or 'period' testing.""" # Pass generic 'coord_is' option to parent as time/period options. which = self.which assert which in ("time", "period") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py index 127ebbf68b..bd645120a7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module +"""Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules.engine` . """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index a8926a0c59..e54c6938bc 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. 
+"""Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. """ from unittest import mock diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index 62bc3a6c9f..e151d92aa8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module +"""Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index 1481f2c886..7d5aa24219 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_albers_equal_area_coordinate_system`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py index 2d1010166f..5ee0c2d992 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`. +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index 7cb7cbf897..e2335d2ee6 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_auxilliary_coordinate`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py index ee66f8b267..74e7d5117d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`. +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py index 973e10217b..165dd97624 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers\ build_cube_metadata`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index 8676ce4a4c..b2c7d4f4d6 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_dimension_coordinate`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index 7fe95840b3..41be1ea932 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_geostationary_coordinate_system`. """ @@ -23,8 +22,7 @@ class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): def _test(self, inverse_flattening=False, replace_props=None, remove_props=None): - """ - Generic test that can check vertical perspective validity with or + """Generic test that can check vertical perspective validity with or without inverse flattening. """ # Make a dictionary of the non-ellipsoid properties to be added to both a test diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index 93d84055ab..45241fbced 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_lambert_azimuthal_equal_area_coordinate_system`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index d2d0659077..fc45a6eab8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_lambert_conformal_coordinate_system`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index cca3610925..dc2188b65e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_mercator_coordinate_system`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py index 26b6d30573..4554ef601d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`. +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`. """ from typing import List, NamedTuple, Type diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py index a20443005c..4661ea5449 100755 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_polar_stereographic_coordinate_system`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index 46c81242ad..4928631336 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_sterographic_coordinate_system`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index 77413adb19..ad61c485e0 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_transverse_mercator_coordinate_system`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index 21906ba644..2c65e09c3f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_vertical_perspective_coordinate_system`. """ @@ -23,8 +22,7 @@ class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): def _test(self, inverse_flattening=False, no_offsets=False): - """ - Generic test that can check vertical perspective validity with or + """Generic test that can check vertical perspective validity with or without inverse flattening, and false_east/northing-s. """ test_easting = 100.0 diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index d80b33f002..5e458c0328 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_attr_units`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index 7d0dc4952c..43a07fe17b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_cf_bounds_var`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index b6a0f3d3c1..b55e570ddc 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_names`. 
""" @@ -20,8 +19,7 @@ class TestGetNames(tests.IrisTest): - """ - The tests included in this class cover all the variations of possible + """The tests included in this class cover all the variations of possible combinations of the following inputs: * standard_name = [None, 'projection_y_coordinate', 'latitude_coordinate'] * long_name = [None, 'lat_long_name'] diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index 20b9a7347f..66d3ffb7e4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ has_supported_mercator_parameters`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index 3bcf14e0b3..a7dc5bd029 100755 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ has_supported_polar_stereographic_parameters`. 
""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index f3908461c7..abbe71012d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`. +"""Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`. """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 1e9d13110e..66620166c5 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.\ reorder_bounds_data`. 
""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py index 221ee30376..d032e2e576 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py @@ -178,8 +178,7 @@ def test_no_chunks_from_file(tmp_filepath, save_cubelist_with_sigma): def test_as_dask(tmp_filepath, save_cubelist_with_sigma): - """ - This does not test return values, as we can't be sure + """This does not test return values, as we can't be sure dask chunking behaviour won't change, or that it will differ from our own chunking behaviour. """ diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py index 0bd4966944..64c2c82007 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.netcdf._translate_constraints_to_var_callback`. """ diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 7abb73ae52..dfb1379d5a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.fileformats.netcdf.load_cubes` function. 
+"""Unit tests for the :func:`iris.fileformats.netcdf.load_cubes` function. todo: migrate the remaining unit-esque tests from iris.tests.test_netcdf, switching to use netcdf.load_cubes() instead of iris.load()/load_cube(). diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 38c4fff32d..28ef972c8c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -847,8 +847,7 @@ def _cube_with_cs(self, coord_system): return cube def _grid_mapping_variable(self, coord_system): - """ - Return a mock netCDF variable that represents the conversion + """Return a mock netCDF variable that represents the conversion of the given coordinate system. """ @@ -874,8 +873,7 @@ def setncattr(self, name, attr): return grid_variable def _variable_attributes(self, coord_system): - """ - Return the attributes dictionary for the grid mapping variable + """Return the attributes dictionary for the grid mapping variable that is created from the given coordinate system. """ diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index d16205072a..69eabac5f5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`. +"""Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`. The behaviour of this method is complex, and this only tests certain aspects. 
The testing of the dask delayed operations and file writing are instead covered by @@ -119,8 +118,7 @@ def test_data_save(self, compute, data_form): cf_var._data_array == mock.sentinel.exact_data_array def test_warnings(self, compute, data_form): - """ - For real data, fill-value warnings are issued immediately. + """For real data, fill-value warnings are issued immediately. For lazy data, warnings are returned from computing a delayed completion. For 'emulated' data (direct array transfer), no checks + no warnings ever. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index fb51a123e4..070cdcaf8b 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :class:`iris.fileformats.netcdf.Saver` class. +"""Unit tests for the :class:`iris.fileformats.netcdf.Saver` class. WHEN MODIFYING THIS MODULE, CHECK IF ANY CORRESPONDING CHANGES ARE NEEDED IN :mod:`iris.tests.unit.fileformats.netcdf.test_Saver__lazy.` @@ -41,8 +40,7 @@ def build_mesh( conn_role_kwargs=None, # mapping {connectivity-role: connectivity-kwargs} mesh_kwargs=None, ): - """ - Make a test mesh. + """Make a test mesh. Mesh has faces edges, face-coords and edge-coords, numbers of which can be controlled. @@ -129,8 +127,7 @@ def apply_xyargs(coords, xyargs): def make_mesh(basic=True, **kwargs): - """ - Create a test mesh, with some built-in 'standard' settings. + """Create a test mesh, with some built-in 'standard' settings. Kwargs: @@ -194,8 +191,7 @@ def default_mesh(): def make_cube(mesh=None, location="face", **kwargs): - """ - Create a test cube, based on a given mesh + location. + """Create a test cube, based on a given mesh + location. 
Kwargs: @@ -236,8 +232,7 @@ def add_height_dim(cube): def scan_dataset(filepath): - """ - Snapshot a netcdf dataset (the key metadata). + """Snapshot a netcdf dataset (the key metadata). Returns: dimsdict, varsdict @@ -263,8 +258,7 @@ def scan_dataset(filepath): def vars_w_props(varsdict, **kwargs): - """ - Subset a vars dict, {name:props}, returning only those where each + """Subset a vars dict, {name:props}, returning only those where each =, defined by the given keywords. Except that '="*"' means that '' merely _exists_, with any value. @@ -303,8 +297,7 @@ def vars_meshnames(vars): def vars_meshdim(vars, location, mesh_name=None): - """ - Extract a dim-name for a given element location. + """Extract a dim-name for a given element location. Args: * vars (varsdict): @@ -344,8 +337,7 @@ def tearDownClass(cls): shutil.rmtree(cls.temp_dir) def check_save_cubes(self, cube_or_cubes): - """ - Write cubes to a new file in the common temporary directory. + """Write cubes to a new file in the common temporary directory. Use a name unique to this testcase, to avoid any clashes. @@ -657,9 +649,7 @@ def test_alternate_cube_dim_order(self): self.assertEqual(v_b[_VAR_DIMS], ["Mesh2d_faces", "height"]) def test_mixed_aux_coords(self): - """ - ``coordinates`` attribute should include mesh location coords and 'normal' coords. - """ + """``coordinates`` attribute should include mesh location coords and 'normal' coords.""" cube = make_cube() mesh_dim = cube.mesh_dim() @@ -694,8 +684,7 @@ def tearDownClass(cls): shutil.rmtree(cls.temp_dir) def check_save_mesh(self, mesh): - """ - Write a mesh to a new file in the common temporary directory. + """Write a mesh to a new file in the common temporary directory. Use a name unique to this testcase, to avoid any clashes. @@ -712,8 +701,7 @@ def check_save_mesh(self, mesh): return tempfile_path def test_connectivity_dim_order(self): - """ - Test a mesh with some connectivities in the 'other' order. 
+ """Test a mesh with some connectivities in the 'other' order. This should also create a property with the dimension name. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index c077c81f20..9fb7485734 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. +"""Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. Note: now runs all testcases on both real + lazy data. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py index d1ffb56a28..77fd0cb0ca 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. +"""Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. """ import warnings diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index d4c3826549..1f0a39f050 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -215,8 +215,7 @@ def test_multi_wrong_length(self): class Test_HdfSaveBug(tests.IrisTest): - """ - Check for a known problem with netcdf4. 
+ """Check for a known problem with netcdf4. If you create dimension with the same name as an existing variable, there is a specific problem, relating to HDF so limited to netcdf-4 formats. @@ -268,8 +267,7 @@ def _check_save_and_reload(self, cubes): return result def assertSameCubes(self, cube1, cube2): - """ - A special tolerant cube compare. + """A special tolerant cube compare. Ignore any 'Conventions' attributes. Ignore all var-names. @@ -353,8 +351,7 @@ def test_connectivity_dim_varname_collision(self): class Test_compute_usage: - """ - Test the operation of the save function 'compute' keyword. + """Test the operation of the save function 'compute' keyword. In actual use, this keyword controls 'delayed saving'. That is tested elsewhere, in testing the 'Saver' class itself. diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index 88d1f56a7a..03ec3f5f65 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the `iris.fileformats.nimrod_load_rules.units` function. +"""Unit tests for the `iris.fileformats.nimrod_load_rules.units` function. """ diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py index 41f8fdfabb..809c54726c 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for the `iris.fileformats.nimrod_load_rules.vertical_coord` +"""Unit tests for the `iris.fileformats.nimrod_load_rules.vertical_coord` function. """ diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 4e2ef616b2..1dbb2097fb 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function. +"""Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function. """ diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index 1fcf04636c..b7558c4c8a 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -56,10 +56,7 @@ def test_realization(): def _pp_save_ppfield_values(cube): - """ - Emulate saving a cube as PP, and capture the resulting PP field values. - - """ + """Emulate saving a cube as PP, and capture the resulting PP field values.""" # Create a test object to stand in for a real PPField. pp_field = mock.MagicMock(spec=pp.PPField3) # Add minimal content required by the pp.save operation. @@ -224,8 +221,7 @@ def test_maximum(self): class TestTimeMean(tests.IrisTest): - """ - Tests that time mean cell method is converted to pp appropriately. + """Tests that time mean cell method is converted to pp appropriately. Pattern is pairs of tests - one with time mean method, and one without, to show divergent behaviour. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index 3bdbdfb8a6..58b7c1f384 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function. +"""Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index d9a44fe013..7d502bc2d6 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._collapse_degenerate_points_and_bounds`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py index 803e47227f..bc3cf8ed86 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._convert_pseudo_level_coords`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py index 6159a1dbd4..ac28fe0a1c 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._convert_scalar_realization_coords`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index 4f50d682d5..5cebc009b9 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._convert_time_coords`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py index a6a51a750b..0e159b254e 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._convert_vertical_coords`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py index af717bb62e..f2f19d9bb1 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._epoch_date_hours`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py index c99de5bc34..fd3d236625 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`. +"""Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py index a33128f39b..6dfc6189bb 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._reduce_points_and_bounds`. """ diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py index d12a718e98..69ff56391e 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for +"""Unit tests for :func:`iris.fileformats.pp_load_rules._reshape_vector_args`. """ diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py index 8a0a9a38d7..71a86116db 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the +"""Unit tests for the :mod:`iris.fileformats._structured_array_identification` module. """ diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index 685b5fc6d1..6012f1fce8 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-""" -Unit tests for the +"""Unit tests for the :mod:`iris.fileformats._structured_array_identification.ArrayStructure` class. """ @@ -130,8 +129,7 @@ def test_multi_dim_array(self): class nd_array_and_dims_cases: - """ - Defines the test functionality for nd_array_and_dims. This class + """Defines the test functionality for nd_array_and_dims. This class isn't actually the test case - see the C order and F order subclasses for those. diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index 9eb2e7e8e6..868f37a1a8 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the +"""Unit tests for the :mod:`iris.fileformats._structured_array_identification.GroupStructure` class. """ diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/test_rules.py index 3f271091af..07681dcf22 100644 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ b/lib/iris/tests/unit/fileformats/test_rules.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test iris.fileformats.rules.py - metadata translation rules. +"""Test iris.fileformats.rules.py - metadata translation rules. 
""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py index f2c18b5f8a..89897d173b 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module :mod:`iris.fileformats.um._fast_load`. +"""Unit tests for the module :mod:`iris.fileformats.um._fast_load`. """ diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py index 24b438f76f..35da9fab47 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the class +"""Unit tests for the class :class:`iris.fileformats.um._fast_load.FieldCollation`. This only tests the additional functionality for recording file locations of diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py index c26382aca9..9d484deef1 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module +"""Unit tests for the module :mod:`iris.fileformats.um._fast_load_structured_fields`. 
""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py index e3e22b94e1..a07672e43a 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the class +"""Unit tests for the class :class:`iris.fileformats.um._fast_load_structured_fields.BasicFieldCollation`. """ diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py index 3b586434b6..7c1a9113b4 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the function :func:\ +"""Unit tests for the function :func:\ `iris.fileformats.um._fast_load_structured_fields.group_structured_fields`. """ @@ -18,8 +17,7 @@ def _convert_to_vector(value, length, default): - """ - Return argument (or default) in a list of length 'length'. + """Return argument (or default) in a list of length 'length'. The 'value' arg must either be scalar, or a list of length 'length'. A value of None is replaced by the default. 
diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py index 5a72973519..2261b8c99e 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the module +"""Unit tests for the module :mod:`iris.fileformats.um._optimal_array_structuring`. """ diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index 63f54fd356..7d90903304 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the function +"""Unit tests for the function :func:`iris.fileformats.um._optimal_array_structuring.optimal_array_structure`. """ diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py index c6256ab015..05c2749f40 100644 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the function +"""Unit tests for the function :func:`iris.fileformats.um.um_to_pp`. 
""" diff --git a/lib/iris/tests/unit/plot/__init__.py b/lib/iris/tests/unit/plot/__init__.py index 5bd4f4785b..c262d014f3 100644 --- a/lib/iris/tests/unit/plot/__init__.py +++ b/lib/iris/tests/unit/plot/__init__.py @@ -58,8 +58,7 @@ def assertPointsTickLabels(self, axis, axes=None): @tests.skip_plot class MixinCoords: - """ - Mixin class of common plotting tests providing 2-dimensional + """Mixin class of common plotting tests providing 2-dimensional permutations of coordinates and anonymous dimensions. """ diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index 26890175b8..126c516ea0 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -67,8 +67,7 @@ def test_plot_longitude(self): @tests.skip_plot class TestTrajectoryWrap(tests.IrisTest): - """ - Test that a line plot of geographic coordinates wraps around the end of the + """Test that a line plot of geographic coordinates wraps around the end of the coordinates rather than plotting across the map. 
""" @@ -87,8 +86,7 @@ def lon_lat_coords(self, lons, lats, cs=None): ) def assertPathsEqual(self, expected, actual): - """ - Assert that the given paths are equal once STOP vertices have been + """Assert that the given paths are equal once STOP vertices have been removed """ @@ -101,8 +99,7 @@ def assertPathsEqual(self, expected, actual): self.assertArrayEqual(expected.codes, actual.codes) def check_paths(self, expected_path, expected_path_crs, lines, axes): - """ - Check that the paths in `lines` match the given expected paths when + """Check that the paths in `lines` match the given expected paths when plotted on the given geoaxes """ diff --git a/lib/iris/tests/unit/util/test__coord_regular.py b/lib/iris/tests/unit/util/test__coord_regular.py index bd9f8f3430..e525ec56a4 100644 --- a/lib/iris/tests/unit/util/test__coord_regular.py +++ b/lib/iris/tests/unit/util/test__coord_regular.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test elements of :mod:`iris.util` that deal with checking coord regularity. +"""Test elements of :mod:`iris.util` that deal with checking coord regularity. Specifically, this module tests the following functions: * :func:`iris.util.is_regular`, diff --git a/lib/iris/tests/unit/util/test__mask_array.py b/lib/iris/tests/unit/util/test__mask_array.py index 2245576de9..7603940632 100644 --- a/lib/iris/tests/unit/util/test__mask_array.py +++ b/lib/iris/tests/unit/util/test__mask_array.py @@ -38,8 +38,7 @@ ) @pytest.mark.parametrize("lazy_array", [False, True], ids=["real", "lazy"]) def test_1d_not_in_place(array, mask, expected, lazy_array, lazy_mask): - """ - Basic test for expected behaviour when working not in place with various + """Basic test for expected behaviour when working not in place with various array types for input. 
""" @@ -63,8 +62,7 @@ def test_1d_not_in_place(array, mask, expected, lazy_array, lazy_mask): def test_plain_array_in_place(): - """ - Test we get an informative error when trying to add a mask to a plain numpy + """Test we get an informative error when trying to add a mask to a plain numpy array. """ @@ -77,8 +75,7 @@ def test_plain_array_in_place(): def test_masked_array_lazy_mask_in_place(): - """ - Test we get an informative error when trying to apply a lazy mask in-place + """Test we get an informative error when trying to apply a lazy mask in-place to a non-lazy array. """ @@ -94,10 +91,7 @@ def test_masked_array_lazy_mask_in_place(): "mask", [mask_1d, masked_mask_1d], ids=["plain-mask", "masked-mask"] ) def test_real_masked_array_in_place(mask): - """ - Check expected behaviour for applying masks in-place to a masked array. - - """ + """Check expected behaviour for applying masks in-place to a masked array.""" arr = masked_arr_1d.copy() result = _mask_array(arr, mask, in_place=True) assert_masked_array_equal(arr, expected2) @@ -106,8 +100,7 @@ def test_real_masked_array_in_place(mask): def test_lazy_array_in_place(): - """ - Test that in place flag is ignored for lazy arrays, and result is the same + """Test that in place flag is ignored for lazy arrays, and result is the same as the not in_place case. """ diff --git a/lib/iris/tests/unit/util/test__slice_data_with_keys.py b/lib/iris/tests/unit/util/test__slice_data_with_keys.py index a46657a3e4..eda4f91055 100644 --- a/lib/iris/tests/unit/util/test__slice_data_with_keys.py +++ b/lib/iris/tests/unit/util/test__slice_data_with_keys.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.util._slice_data_with_keys`. +"""Test function :func:`iris.util._slice_data_with_keys`. 
Note: much of the functionality really belongs to the other routines, :func:`iris.util._build_full_slice_given_keys`, and diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index 8cc4936908..fb8dc572cf 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Unit tests for the :func:`iris.util.equalise_attributes` function. +"""Unit tests for the :func:`iris.util.equalise_attributes` function. """ @@ -154,8 +153,7 @@ def test_complex_somecommon(self): class TestSplitattributes: - """ - Extra testing for cases where attributes differ specifically by type + """Extra testing for cases where attributes differ specifically by type That is, where there is a new possibility of 'mismatch' due to the newer "typing" of attributes as global or local. diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index 2630caa0e5..93385ed0e0 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.util.test_file_is_newer`. +"""Test function :func:`iris.util.test_file_is_newer`. """ diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py index 0caa70cff2..ef1678be06 100644 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ b/lib/iris/tests/unit/util/test_mask_cube.py @@ -30,8 +30,7 @@ def full2d_global(): class MaskCubeMixin: def assertOriginalMetadata(self, cube, func): - """ - Check metadata matches that of input cube. 
func is a string indicating + """Check metadata matches that of input cube. func is a string indicating which function created the original cube. """ diff --git a/lib/iris/util.py b/lib/iris/util.py index 95b65bcbcd..2aaa827a65 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Miscellaneous utility functions. +"""Miscellaneous utility functions. """ @@ -29,8 +28,7 @@ def broadcast_to_shape(array, shape, dim_map): - """ - Broadcast an array to a given shape. + """Broadcast an array to a given shape. Each dimension of the array must correspond to a dimension in the given shape. The result is a read-only view (see :func:`numpy.broadcast_to`). @@ -101,8 +99,7 @@ def broadcast_to_shape(array, shape, dim_map): def delta(ndarray, dimension, circular=False): - """ - Calculates the difference between values along a given dimension. + """Calculates the difference between values along a given dimension. Args: @@ -165,8 +162,7 @@ def delta(ndarray, dimension, circular=False): def describe_diff(cube_a, cube_b, output_file=None): - """ - Prints the differences that prevent compatibility between two cubes, as + """Prints the differences that prevent compatibility between two cubes, as defined by :meth:`iris.cube.Cube.is_compatible()`. Args: @@ -238,8 +234,7 @@ def describe_diff(cube_a, cube_b, output_file=None): def guess_coord_axis(coord): - """ - Returns a "best guess" axis name of the coordinate. + """Returns a "best guess" axis name of the coordinate. Heuristic categorisation of the coordinate into either label 'T', 'Z', 'Y', 'X' or None. 
@@ -291,8 +286,7 @@ def guess_coord_axis(coord): def rolling_window(a, window=1, step=1, axis=-1): - """ - Make an ndarray with a rolling window of the last dimension + """Make an ndarray with a rolling window of the last dimension Args: @@ -364,8 +358,7 @@ def rolling_window(a, window=1, step=1, axis=-1): def array_equal(array1, array2, withnans=False): - """ - Returns whether two arrays have the same shape and elements. + """Returns whether two arrays have the same shape and elements. Args: @@ -407,8 +400,7 @@ def normalise_array(array): def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): - """ - Returns whether two numbers are almost equal, allowing for the finite + """Returns whether two numbers are almost equal, allowing for the finite precision of floating point numbers. .. deprecated:: 3.2.0 @@ -442,8 +434,7 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): def between(lh, rh, lh_inclusive=True, rh_inclusive=True): - """ - Provides a convenient way of defining a 3 element inequality such as + """Provides a convenient way of defining a 3 element inequality such as ``a < number < b``. Arguments: @@ -490,8 +481,7 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): def reverse(cube_or_array, coords_or_dims): - """ - Reverse the cube or array along the given dimensions. + """Reverse the cube or array along the given dimensions. Args: @@ -586,8 +576,7 @@ def reverse(cube_or_array, coords_or_dims): def monotonic(array, strict=False, return_direction=False): - """ - Return whether the given 1d array is monotonic. + """Return whether the given 1d array is monotonic. Note that, the array must not contain missing data. 
@@ -653,8 +642,7 @@ def monotonic(array, strict=False, return_direction=False): def column_slices_generator(full_slice, ndims): - """ - Given a full slice full of tuples, return a dictionary mapping old + """Given a full slice full of tuples, return a dictionary mapping old data dimensions to new and a generator which gives the successive slices needed to index correctly (across columns). @@ -727,8 +715,7 @@ def is_tuple_style_index(key): def _build_full_slice_given_keys(keys, ndim): - """ - Given the keys passed to a __getitem__ call, build an equivalent + """Given the keys passed to a __getitem__ call, build an equivalent tuple of keys which span ndims. """ @@ -791,8 +778,7 @@ def _build_full_slice_given_keys(keys, ndim): def _slice_data_with_keys(data, keys): - """ - Index an array-like object as "data[keys]", with orthogonal indexing. + """Index an array-like object as "data[keys]", with orthogonal indexing. Args: @@ -837,8 +823,7 @@ def _slice_data_with_keys(data, keys): def _wrap_function_for_method(function, docstring=None): - """ - Returns a wrapper function modified to be suitable for use as a + """Returns a wrapper function modified to be suitable for use as a method. The wrapper function renames the first argument as "self" and allows @@ -884,8 +869,7 @@ def _wrap_function_for_method(function, docstring=None): class _MetaOrderedHashable(ABCMeta): - """ - A metaclass that ensures that non-abstract subclasses of _OrderedHashable + """A metaclass that ensures that non-abstract subclasses of _OrderedHashable without an explicit __init__ method are given a default __init__ method with the appropriate method signature. @@ -930,8 +914,7 @@ def __new__(cls, name, bases, namespace): @functools.total_ordering class _OrderedHashable(Hashable, metaclass=_MetaOrderedHashable): - """ - Convenience class for creating "immutable", hashable, and ordered classes. + """Convenience class for creating "immutable", hashable, and ordered classes. 
Instance identity is defined by the specific list of attribute names declared in the abstract attribute "_names". Subclasses must declare the @@ -951,8 +934,7 @@ class _OrderedHashable(Hashable, metaclass=_MetaOrderedHashable): @property @abstractmethod def _names(self): - """ - Override this attribute to declare the names of all the attributes + """Override this attribute to declare the names of all the attributes relevant to the hash/comparison semantics. """ @@ -1019,8 +1001,7 @@ def create_temp_filename(suffix=""): def clip_string(the_str, clip_length=70, rider="..."): - """ - Returns a clipped version of the string based on the specified clip + """Returns a clipped version of the string based on the specified clip length and whether or not any graceful clip points can be found. If the string to be clipped is shorter than the specified clip @@ -1075,8 +1056,7 @@ def clip_string(the_str, clip_length=70, rider="..."): def format_array(arr): - """ - Returns the given array as a string, using the python builtin str + """Returns the given array as a string, using the python builtin str function on a piecewise basis. Useful for xml representation of arrays. @@ -1103,8 +1083,7 @@ def format_array(arr): def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy - """ - Create a new axis as the leading dimension of the cube, promoting a scalar + """Create a new axis as the leading dimension of the cube, promoting a scalar coordinate if specified. Args: @@ -1228,8 +1207,7 @@ def _handle_dimensional_metadata(cube, dm_item, cube_add_method, expand_extras): def squeeze(cube): - """ - Removes any dimension of length one. If it has an associated DimCoord or + """Removes any dimension of length one. If it has an associated DimCoord or AuxCoord, this becomes a scalar coord. 
Args: @@ -1264,8 +1242,7 @@ def squeeze(cube): def file_is_newer_than(result_path, source_paths): - """ - Return whether the 'result' file has a later modification time than all of + """Return whether the 'result' file has a later modification time than all of the 'source' files. If a stored result depends entirely on known 'sources', it need only be @@ -1321,8 +1298,7 @@ def file_is_newer_than(result_path, source_paths): def is_regular(coord): - """ - Determine if the given coord is regular. + """Determine if the given coord is regular. Notes ------ @@ -1339,8 +1315,7 @@ def is_regular(coord): def regular_step(coord): - """ - Return the regular step from a coord or fail. + """Return the regular step from a coord or fail. Notes ------ @@ -1423,8 +1398,7 @@ def points_step(points): def unify_time_units(cubes): - """ - Performs an in-place conversion of the time units of all time coords in the + """Performs an in-place conversion of the time units of all time coords in the cubes in a given iterable. One common epoch is defined for each calendar found in the cubes to prevent units being defined with inconsistencies between epoch and calendar. During this process, all time coordinates have @@ -1460,8 +1434,7 @@ def unify_time_units(cubes): def _is_circular(points, modulus, bounds=None): - """ - Determine whether the provided points or bounds are circular in nature + """Determine whether the provided points or bounds are circular in nature relative to the modulus value. If the bounds are provided then these are checked for circularity rather @@ -1528,8 +1501,7 @@ def _is_circular(points, modulus, bounds=None): def promote_aux_coord_to_dim_coord(cube, name_or_coord): - """ - Promotes an AuxCoord on the cube to a DimCoord. This AuxCoord must be + """Promotes an AuxCoord on the cube to a DimCoord. This AuxCoord must be associated with a single cube dimension. 
If the AuxCoord is associated with a dimension that already has a DimCoord, that DimCoord gets demoted to an AuxCoord. @@ -1656,8 +1628,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): def demote_dim_coord_to_aux_coord(cube, name_or_coord): - """ - Demotes a dimension coordinate on the cube to an auxiliary coordinate. + """Demotes a dimension coordinate on the cube to an auxiliary coordinate. The DimCoord is demoted to an auxiliary coordinate on the cube. The dimension of the cube that was associated with the DimCoord becomes @@ -1752,8 +1723,7 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): @functools.wraps(np.meshgrid) def _meshgrid(*xi, **kwargs): - """ - @numpy v1.13, the dtype of each output n-D coordinate is the same as its + """@numpy v1.13, the dtype of each output n-D coordinate is the same as its associated input 1D coordinate. This is not the case prior to numpy v1.13, where the output dtype is cast up to its highest resolution, regardlessly. @@ -1771,8 +1741,7 @@ def _meshgrid(*xi, **kwargs): def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): - """ - Searches the 'x' and 'y' coord on the cube for discontiguities in the + """Searches the 'x' and 'y' coord on the cube for discontiguities in the bounds array, returned as a boolean array (True for all cells which are discontiguous with the cell immediately above them or to their right). @@ -1866,8 +1835,7 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): def _mask_array(array, points_to_mask, in_place=False): - """ - Apply masking to array where points_to_mask is True/non-zero. Designed to + """Apply masking to array where points_to_mask is True/non-zero. Designed to work with iris.analysis.maths._binary_op_common so array and points_to_mask will be broadcastable to each other. array and points_to_mask may be numpy or dask types (or one of each). 
@@ -1920,8 +1888,7 @@ def _mask_array(array, points_to_mask, in_place=False): @_lenient_client(services=SERVICES) def mask_cube(cube, points_to_mask, in_place=False, dim=None): - """ - Masks any cells in the cube's data array which correspond to cells marked + """Masks any cells in the cube's data array which correspond to cells marked ``True`` (or non zero) in ``points_to_mask``. ``points_to_mask`` may be specified as a :class:`numpy.ndarray`, :class:`dask.array.Array`, :class:`iris.coords.Coord` or :class:`iris.cube.Cube`, following the same @@ -1988,8 +1955,7 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): def equalise_attributes(cubes): - """ - Delete cube attributes that are not identical over all cubes in a group. + """Delete cube attributes that are not identical over all cubes in a group. This function deletes any attributes which are not the same for all the given cubes. The cubes will then have identical attributes, and the @@ -2065,8 +2031,7 @@ def equalise_attributes(cubes): def is_masked(array): - """ - Equivalent to :func:`numpy.ma.is_masked`, but works for both lazy AND realised arrays. + """Equivalent to :func:`numpy.ma.is_masked`, but works for both lazy AND realised arrays. Parameters ---------- @@ -2093,8 +2058,7 @@ def is_masked(array): def _strip_metadata_from_dims(cube, dims): - """ - Remove ancillary variables and cell measures that map to specific dimensions. + """Remove ancillary variables and cell measures that map to specific dimensions. Returns a cube copy with (possibly) some cell-measures and ancillary variables removed. diff --git a/noxfile.py b/noxfile.py index 9a27b5db98..4bdde52d41 100755 --- a/noxfile.py +++ b/noxfile.py @@ -1,5 +1,4 @@ -""" -Perform test automation with nox. +"""Perform test automation with nox. 
For further details, see https://nox.thea.codes/en/stable/# @@ -69,8 +68,7 @@ def venv_changed(session: nox.sessions.Session) -> bool: def cache_venv(session: nox.sessions.Session) -> None: - """ - Cache the nox session environment. + """Cache the nox session environment. This consists of saving a hexdigest (sha256) of the associated conda lock file. @@ -90,8 +88,7 @@ def cache_venv(session: nox.sessions.Session) -> None: def cache_cartopy(session: nox.sessions.Session) -> None: - """ - Determine whether to cache the cartopy natural earth shapefiles. + """Determine whether to cache the cartopy natural earth shapefiles. Parameters ---------- @@ -108,8 +105,7 @@ def cache_cartopy(session: nox.sessions.Session) -> None: def prepare_venv(session: nox.sessions.Session) -> None: - """ - Create and cache the nox session conda environment, and additionally + """Create and cache the nox session conda environment, and additionally provide conda environment package details and info. Note that, iris is installed into the environment using pip. @@ -167,8 +163,7 @@ def prepare_venv(session: nox.sessions.Session) -> None: @nox.session(python=PY_VER, venv_backend="conda") def tests(session: nox.sessions.Session): - """ - Perform iris system, integration and unit tests. + """Perform iris system, integration and unit tests. Coverage testing is enabled if the "--coverage" or "-c" flag is used. @@ -194,8 +189,7 @@ def tests(session: nox.sessions.Session): @nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") def doctest(session: nox.sessions.Session): - """ - Perform iris doctests and gallery. + """Perform iris doctests and gallery. Parameters ---------- @@ -222,8 +216,7 @@ def doctest(session: nox.sessions.Session): @nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") def gallery(session: nox.sessions.Session): - """ - Perform iris gallery doc-tests. + """Perform iris gallery doc-tests. 
Parameters ---------- @@ -244,8 +237,7 @@ def gallery(session: nox.sessions.Session): @nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") def linkcheck(session: nox.sessions.Session): - """ - Perform iris doc link check. + """Perform iris doc link check. Parameters ---------- @@ -271,8 +263,7 @@ def linkcheck(session: nox.sessions.Session): @nox.session(python=PY_VER, venv_backend="conda") def wheel(session: nox.sessions.Session): - """ - Perform iris local wheel install and import test. + """Perform iris local wheel install and import test. Parameters ---------- diff --git a/setup.py b/setup.py index dbba57306b..6e58a7999d 100644 --- a/setup.py +++ b/setup.py @@ -23,8 +23,7 @@ def run(self): def custom_command(cmd, help=""): - """ - Factory function to generate a custom command that adds additional + """Factory function to generate a custom command that adds additional behaviour to build the CF standard names module. """ From c6da1af3456e8ed6f0727c7b87df8c6b6c56c6a8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 09:32:32 +0000 Subject: [PATCH 118/134] Bump scitools/workflows from 2023.10.0 to 2023.12.1 (#5648) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2023.10.0 to 2023.12.1. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2023.10.0...2023.12.1) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 65716338de..abe77be606 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.10.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.12.1 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index d92b653f26..d1753a7b1b 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.10.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.12.1 secrets: inherit From d05cc8e2c690149373cf4982e190904e1e50adf4 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 09:34:32 +0000 Subject: [PATCH 119/134] Updated environment lockfiles (#5645) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 32 ++++++++++++------------ requirements/locks/py311-linux-64.lock | 34 +++++++++++++------------- requirements/locks/py39-linux-64.lock | 30 +++++++++++------------ 3 files changed, 48 insertions(+), 48 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 53ffa7f535..1c9184e51a 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -30,11 +30,11 @@ https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76 
https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 @@ -63,13 +63,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.co https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc 
-https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda#bc6ac4c0cea148d924f621985bc3892b https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 @@ -102,13 +102,13 @@ https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.con https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py310hc6cd4ac_0.conda#7f987c519edb4df04d21a282678368cf -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 
+https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda#bf40f2a8835b78b1f91083d306b493d2 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -149,7 +149,7 @@ https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 
https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -157,11 +157,11 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 +https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.3-py310h2372a71_0.conda#c07e83a9bd8f5053b42be842b9871df9 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py310h2372a71_0.conda#3c0109417cbcdabfed289360886b036d https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 @@ -177,12 +177,12 @@ 
https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda#d3147cfbf72d6ae7bba10562208f6def https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310hd5c30f3_5.conda#dc2ee770a2299307f3c127af79160d25 @@ -193,7 +193,7 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.1-pyhd8ed1ab_0.conda#bf6ad72d882bc3f04e6a0fb50fd2cce8 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py310h2372a71_1.conda#dfcf64f67961eb9686676f96fdb4b4d1 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 @@ -204,14 +204,14 @@ https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py310hb13e2d6_0.con https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310hc3e127f_1.conda#fdaca8d27b3af78d617521eb37b1d055 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_4.conda#0ca55ca20891d393846695354b32ebc5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.1-pyhd8ed1ab_0.conda#6b31b9b627f238a0068926d5650ae128 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h7f000aa_3.conda#0abfa7f9241a0f4fd732bc15773cfb0c 
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-he3f83f7_1.conda#03bd1ddcc942867a19528877143b9852 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py310h62c0568_0.conda#3cbbc7d0b54df02c9a006d3de14911d9 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py310hba70d50_100.conda#e19392760c7e4da3b9cb0ee5bf61bc4b -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.6.0-pyha770c72_0.conda#473a7cfca197da0a10cff3f6dded7d4b https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h1f7b6fc_1.conda#857b828a13cdddf568958f7575b25b22 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310hcc13569_1.conda#31ef447724fb19066a9d00a660dab1bd https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 3c655e2192..455cbd7a9b 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -30,11 +30,11 @@ https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76 https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 
+https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 @@ -63,13 +63,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.co https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf 
https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda#bc6ac4c0cea148d924f621985bc3892b https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 @@ -84,7 +84,7 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd9 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.7-hab00c5b_0_cpython.conda#bf281a975393266ab95734a8cfd532ec https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b @@ -102,13 +102,13 @@ 
https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.con https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py311hb755f60_0.conda#88cc84238dda72e11285d9cfcbe43e51 -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda#bf40f2a8835b78b1f91083d306b493d2 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -149,18 +149,18 @@ https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 +https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 
+https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.3-py311h459d7ec_0.conda#9db2c1316e96068c0189beaeb716f3fe https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py311h459d7ec_0.conda#a14114f70e23f7fd5ab9941fec45b095 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 @@ -176,12 +176,12 @@ https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 
https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311hca0b8b9_5.conda#cac429fcb9126d5e6f02c8ba61c2a811 @@ -192,7 +192,7 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.1-pyhd8ed1ab_0.conda#bf6ad72d882bc3f04e6a0fb50fd2cce8 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py311h459d7ec_1.conda#45b8d355bbcdd27588c2d266bcfdff84 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 @@ -203,14 +203,14 @@ https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py311h64a7726_0.con https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311h2032efe_1.conda#4ba860ff851768615b1a25b788022750 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.1-pyhd8ed1ab_0.conda#6b31b9b627f238a0068926d5650ae128 
https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h7f000aa_3.conda#0abfa7f9241a0f4fd732bc15773cfb0c https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-he3f83f7_1.conda#03bd1ddcc942867a19528877143b9852 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py311h54ef318_0.conda#9f80753bc008bfc9b95f39d9ff9f1694 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py311he8ad708_100.conda#597b1ad6cb7011b7561c20ea30295cae -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.6.0-pyha770c72_0.conda#473a7cfca197da0a10cff3f6dded7d4b https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_1.conda#10d1806e20da040c58c36deddf51c70c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 0b98e25ab8..8724473f86 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -30,11 +30,11 @@ https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76 https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 @@ -63,13 +63,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.co https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 
+https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda#bc6ac4c0cea148d924f621985bc3892b https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 @@ -102,13 +102,13 @@ https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.con https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py39h3d6467e_0.conda#bfde3cf098e298b81d1c1cbc9c79ab59 -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 
https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda#bf40f2a8835b78b1f91083d306b493d2 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -148,7 +148,7 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04e https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 
https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec @@ -156,7 +156,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 +https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 @@ -176,13 +176,13 @@ https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 
https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py39h474f0d3_0.conda#459a58eda3e74dd5e3d596c618e7f20a https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39h15b0fa6_5.conda#85e186c7ff673b0d0026782ec353fb2a @@ -192,7 +192,7 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.1-pyhd8ed1ab_0.conda#bf6ad72d882bc3f04e6a0fb50fd2cce8 https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py39hd1e30aa_1.conda#ca63612907462c8e36edcc9bbacc253e https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 @@ -203,14 +203,14 @@ https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py39h474f0d3_0.cond https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h6404dd3_1.conda#05623249055d99c51cde021b525611db https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_4.conda#81310d21bf9d91754c1220c585bb72d6 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.1-pyhd8ed1ab_0.conda#6b31b9b627f238a0068926d5650ae128 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h7f000aa_3.conda#0abfa7f9241a0f4fd732bc15773cfb0c 
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-he3f83f7_1.conda#03bd1ddcc942867a19528877143b9852 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py39he9076e7_0.conda#6085411aa2f0b2b801d3b46e1d3b83c5 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py39h4282601_100.conda#d2809fbf0d8ae7b8ca92c456cb44a7d4 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.6.0-pyha770c72_0.conda#473a7cfca197da0a10cff3f6dded7d4b https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h44dd56e_1.conda#90c5165691fdcb5a9f43907e32ea48b4 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39hddac248_1.conda#8dd2eb1e7aa9a33a92a75bdcea3f0dd0 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 From 454c9053d4677f87f821f6764f1c0bc95e4a17f1 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 18 Dec 2023 10:09:09 +0000 Subject: [PATCH 120/134] fix for ruff rule D301 (#5646) * fixed for ruff rule D301. * additional fix * remove debug comment. 
--- .ruff.toml | 1 - benchmarks/benchmarks/cperf/equality.py | 4 ++-- benchmarks/benchmarks/sperf/equality.py | 2 +- lib/iris/_constraints.py | 2 +- lib/iris/_merge.py | 2 +- lib/iris/cube.py | 12 ++++++------ lib/iris/exceptions.py | 2 +- lib/iris/experimental/ugrid/load.py | 4 ++-- lib/iris/experimental/ugrid/mesh.py | 4 ++-- lib/iris/fileformats/_nc_load_rules/engine.py | 2 +- lib/iris/pandas.py | 4 ++-- lib/iris/tests/test_coding_standards.py | 4 ++-- .../tests/unit/coords/test__DimensionalMetadata.py | 2 +- 13 files changed, 22 insertions(+), 23 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index e09d03c2d4..7ee7f985af 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -42,7 +42,6 @@ lint.ignore = [ "D211", # No blank lines allowed before class docstring "D214", # Section is over-indented "D300", # triple double quotes `""" / Use triple single quotes `'''` - "D301", # Use `r"""` if any backslashes in a docstring "D400", # First line should end with a period "D401", # First line of docstring should be in imperative mood: ... "D403", # First word of the first line should be capitalized diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index 9a3ceded9f..a25cf99128 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -9,7 +9,7 @@ class EqualityMixin(SingleDiagnosticMixin): - """Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing + r"""Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing :class:`~iris.cube.Cube`\\ s that have been loaded from file. """ @@ -24,7 +24,7 @@ def setup(self, file_type, three_d=False, three_times=False): @on_demand_benchmark class CubeEquality(EqualityMixin): - """Benchmark time and memory costs of comparing LFRic and UM + r"""Benchmark time and memory costs of comparing LFRic and UM :class:`~iris.cube.Cube`\\ s. 
""" diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index 827490b082..813cfad6bf 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -10,7 +10,7 @@ @on_demand_benchmark class CubeEquality(FileMixin): - """Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s + r"""Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. Uses :class:`FileMixin` as the realistic case will be comparing diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 4ce14d5ece..1d81cd603b 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -33,7 +33,7 @@ def __init__(self, name=None, cube_func=None, coord_values=None, **kwargs): ---------- name : str or None, optional If a string, it is used as the name to match against the - :attr:`iris.cube.Cube.names` property. TREMTEST + :attr:`iris.cube.Cube.names` property. cube_func : callable or None, optional If a callable, it must accept a Cube as its first and only argument and return either True or False. diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index db2b410b94..19848ff244 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -525,7 +525,7 @@ def _is_combination(name): def build_indexes(positions): - """Construct a mapping for each candidate dimension that maps for each + r"""Construct a mapping for each candidate dimension that maps for each of its scalar values the set of values for each of the other candidate dimensions. 
diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 7e35893533..aff9ad5d0d 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -752,7 +752,7 @@ def _is_single_item(testee): class CubeAttrsDict(MutableMapping): - """A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, + r"""A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, providing unified user access to combined cube "local" and "global" attributes dictionaries, with the access behaviour of an ordinary (single) dictionary. @@ -2018,7 +2018,7 @@ def coords( dim_coords=None, mesh_coords=None, ): - """Return a list of coordinates from the :class:`Cube` that match the + r"""Return a list of coordinates from the :class:`Cube` that match the provided criteria. .. seealso:: @@ -2184,7 +2184,7 @@ def coord( dim_coords=None, mesh_coords=None, ): - """Return a single coordinate from the :class:`Cube` that matches the + r"""Return a single coordinate from the :class:`Cube` that matches the provided criteria. .. note:: @@ -2359,7 +2359,7 @@ def _any_meshcoord(self): @property def mesh(self): - """Return the unstructured :class:`~iris.experimental.ugrid.Mesh` + r"""Return the unstructured :class:`~iris.experimental.ugrid.Mesh` associated with the cube, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2379,7 +2379,7 @@ def mesh(self): @property def location(self): - """Return the mesh "location" of the cube data, if the cube has any + r"""Return the mesh "location" of the cube data, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. @@ -2398,7 +2398,7 @@ def location(self): return result def mesh_dim(self): - """Return the cube dimension of the mesh, if the cube has any + r"""Return the cube dimension of the mesh, if the cube has any :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. 
diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 8fad591658..36523b8ed6 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -176,7 +176,7 @@ class CannotAddError(ValueError): class IrisUserWarning(UserWarning): - """Base class for :class:`UserWarning`\\ s generated by Iris.""" + r"""Base class for :class:`UserWarning`\\ s generated by Iris.""" pass diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 317d64ca04..07cc20a65a 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Extensions to Iris' NetCDF loading to allow the construction of +r"""Extensions to Iris' NetCDF loading to allow the construction of :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es from UGRID data in the file. Eventual destination: :mod:`iris.fileformats.netcdf`. @@ -143,7 +143,7 @@ def load_mesh(uris, var_name=None): def load_meshes(uris, var_name=None): - """Load :class:`~iris.experimental.ugrid.mesh.Mesh` objects from one or more NetCDF files. + r"""Load :class:`~iris.experimental.ugrid.mesh.Mesh` objects from one or more NetCDF files. Parameters ---------- diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 02b26f41f1..5d320384b7 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -659,7 +659,7 @@ def normalise(element, axis): @classmethod def from_coords(cls, *coords): - """Construct a :class:`Mesh` by derivation from one or more + r"""Construct a :class:`Mesh` by derivation from one or more :class:`~iris.coords.Coord`\\ s. 
The :attr:`~Mesh.topology_dimension`, :class:`~iris.coords.Coord` @@ -1865,7 +1865,7 @@ def to_MeshCoord(self, location, axis): return MeshCoord(mesh=self, location=location, axis=axis) def to_MeshCoords(self, location): - """Generate a tuple of + r"""Generate a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`\\ s, each referencing the current :class:`Mesh`, one for each :attr:`AXES` value, passing through the ``location`` argument. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 20527fdee4..e43a1c5c4b 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -20,7 +20,7 @@ class FactEntity: - """An object with an 'entity_lists' property which is a dict of 'FactList's. + r"""An object with an 'entity_lists' property which is a dict of 'FactList's. A Factlist, in turn, is an object with property 'case_specific_facts', which is a list of tuples of strings diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index c5b4b0b498..fa12f087e4 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -192,7 +192,7 @@ def as_cubes( cell_measure_cols=None, ancillary_variable_cols=None, ): - """Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. + r"""Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. The index of `pandas_structure` will be used for generating the :class:`~iris.cube.Cube` dimension(s) and :class:`~iris.coords.DimCoord`\\ s. @@ -617,7 +617,7 @@ def as_data_frame( add_cell_measures=False, add_ancillary_variables=False, ): - """Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. + r"""Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. :attr:`~iris.cube.Cube.dim_coords` and :attr:`~iris.cube.Cube.data` are flattened into a long-style :class:`~pandas.DataFrame`. 
Other diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 7be3acca63..423d6bd092 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -125,7 +125,7 @@ def test_python_versions(): def test_categorised_warnings(): - """To ensure that all UserWarnings raised by Iris are categorised, for ease of use. + r"""To ensure that all UserWarnings raised by Iris are categorised, for ease of use. No obvious category? Use the parent: :class:`iris.exceptions.IrisUserWarning`. @@ -182,7 +182,7 @@ class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): class TestLicenseHeaders(tests.IrisTest): @staticmethod def whatchanged_parse(whatchanged_output): - """Returns a generator of tuples of data parsed from + r"""Returns a generator of tuples of data parsed from "git whatchanged --pretty='TIME:%at". The tuples are of the form ``(filename, last_commit_datetime)`` diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index cf850ce907..caff08fb66 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -172,7 +172,7 @@ def coord_representations(self, *args, **kwargs): return self.repr_str_strings(coord) def assertLines(self, list_of_expected_lines, string_result): - """Assert equality between a result and expected output lines. + r"""Assert equality between a result and expected output lines. For convenience, the 'expected lines' are joined with a '\\n', because a list of strings is nicer to construct in code. 
From fd548fdb738405adf908cfed1abd70ca26a48725 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 18 Dec 2023 14:07:23 +0000 Subject: [PATCH 121/134] Remove compliant ignored ruff rules (#5649) --- .ruff.toml | 48 ------------------------------------------------ 1 file changed, 48 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 7ee7f985af..fb5c9cd96a 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -58,22 +58,10 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#pyupgrade-up "UP", - # flake8-2020 (YTT) - # https://docs.astral.sh/ruff/rules/#flake8-2020-ytt - "YTT", - # flake8-annotations (ANN) # https://docs.astral.sh/ruff/rules/#flake8-annotations-ann "ANN", - # flake8-async (ASYNC) - # https://docs.astral.sh/ruff/rules/#flake8-async-async - "ASYNC", - - # flake8-trio (TRIO) - # https://docs.astral.sh/ruff/rules/#flake8-trio-trio - "TRIO", - # flake8-bandit (S) # https://docs.astral.sh/ruff/rules/#flake8-bandit-s "S", @@ -98,10 +86,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-commas-com "COM", - # flake8-copyright (CPY) - # https://docs.astral.sh/ruff/rules/#flake8-copyright-cpy - "CPY", - # flake8-comprehensions (C4) # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 "C4", @@ -110,14 +94,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz "DTZ", - # flake8-debugger (T10) - # https://docs.astral.sh/ruff/rules/#flake8-debugger-t10 - "T10", - - # flake8-django (DJ) - # https://docs.astral.sh/ruff/rules/#flake8-django-dj - "DJ", - # flake8-errmsg (EM) # https://docs.astral.sh/ruff/rules/#flake8-errmsg-em "EM", @@ -130,10 +106,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa "FA", - # flake8-implicit-str-concat (ISC) - # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc - "ISC", - # flake8-import-conventions (ICN) # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn "ICN", @@ -162,10 +134,6 @@ lint.ignore = [ # 
https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt "PT", - # flake8-quotes (Q) - # https://docs.astral.sh/ruff/rules/#flake8-quotes-q - "Q", - # flake8-raise (RSE) # https://docs.astral.sh/ruff/rules/#flake8-raise-rse "RSE", @@ -194,10 +162,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-type-checking-tch "TCH", - # flake8-gettext (INT) - # https://docs.astral.sh/ruff/rules/#flake8-gettext-int - "INT", - # flake8-unused-arguments (ARG) # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg "ARG", @@ -242,22 +206,10 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#numpy-specific-rules-npy "NPY", - # Airflow (AIR) - # https://docs.astral.sh/ruff/rules/#airflow-air - "AIR", - # Perflint (PERF) # https://docs.astral.sh/ruff/rules/#perflint-perf "PERF", - # refurb (FURB) - # https://docs.astral.sh/ruff/rules/#refurb-furb - "FURB", - - # flake8-logging (LOG) - # https://docs.astral.sh/ruff/rules/#flake8-logging-log - "LOG", - # Ruff-specific rules (RUF) # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf "RUF", From 49d1b0f99a5b3f5d23cebaa54bede0af793105cd Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 18 Dec 2023 14:22:04 +0000 Subject: [PATCH 122/134] ruff com rule compliance (#5650) --- .ruff.toml | 6 +++--- pyproject.toml | 5 +++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index fb5c9cd96a..01a36c2549 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -82,9 +82,9 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-builtins-a "A", - # flake8-commas (COM) - # https://docs.astral.sh/ruff/rules/#flake8-commas-com - "COM", + # flake8-copyright (CPY) + # https://docs.astral.sh/ruff/rules/#flake8-copyright-cpy + "CPY", # flake8-comprehensions (C4) # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 diff --git a/pyproject.toml b/pyproject.toml index acbb0c9c4b..d0c2816e93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,6 +82,11 @@ preview = false ignore 
= [ # NOTE: Non-permanent exclusions should be added to the ".ruff.toml" file. + # flake8-commas (COM) + # https://docs.astral.sh/ruff/rules/#flake8-commas-com + "COM812", # Trailing comma missing. + "COM819", # Trailing comma prohibited. + # flake8-implicit-str-concat (ISC) # https://docs.astral.sh/ruff/rules/single-line-implicit-string-concatenation/ # NOTE: This rule may cause conflicts when used with "ruff format". From 953ec73c10df5159f31b1199eb5479c10490744f Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 18 Dec 2023 14:25:37 +0000 Subject: [PATCH 123/134] ruff EXE rule compliance (#5651) --- .ruff.toml | 4 ---- lib/iris/tests/graphics/README.md | 0 lib/iris/tests/graphics/__init__.py | 1 - lib/iris/tests/graphics/idiff.py | 2 +- lib/iris/tests/graphics/recreate_imagerepo.py | 2 +- lib/iris/tests/integration/test_Datums.py | 0 lib/iris/tests/unit/analysis/test_MAX_RUN.py | 0 lib/iris/tests/unit/coord_systems/test_PolarStereographic.py | 0 .../test_build_polar_stereographic_coordinate_system.py | 0 .../test_has_supported_polar_stereographic_parameters.py | 0 lib/iris/tests/unit/io/test__generate_cubes.py | 0 lib/iris/tests/unit/io/test_save.py | 0 noxfile.py | 0 13 files changed, 2 insertions(+), 7 deletions(-) mode change 100755 => 100644 lib/iris/tests/graphics/README.md mode change 100755 => 100644 lib/iris/tests/graphics/__init__.py mode change 100755 => 100644 lib/iris/tests/integration/test_Datums.py mode change 100755 => 100644 lib/iris/tests/unit/analysis/test_MAX_RUN.py mode change 100755 => 100644 lib/iris/tests/unit/coord_systems/test_PolarStereographic.py mode change 100755 => 100644 lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py mode change 100755 => 100644 lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py mode change 100755 => 100644 lib/iris/tests/unit/io/test__generate_cubes.py mode change 100755 => 100644 
lib/iris/tests/unit/io/test_save.py mode change 100755 => 100644 noxfile.py diff --git a/.ruff.toml b/.ruff.toml index 01a36c2549..6ecdecfd05 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -98,10 +98,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-errmsg-em "EM", - # flake8-executable (EXE) - # https://docs.astral.sh/ruff/rules/#flake8-executable-exe - "EXE", - # flake8-future-annotations (FA) # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa "FA", diff --git a/lib/iris/tests/graphics/README.md b/lib/iris/tests/graphics/README.md old mode 100755 new mode 100644 diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py old mode 100755 new mode 100644 index c62ac1bf0e..ace62248d1 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -2,7 +2,6 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# !/usr/bin/env python """Contains Iris graphic testing utilities By default, this module sets the matplotlib backend to "agg". But when diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 53d74caf27..41cf20964f 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -1,8 +1,8 @@ +#!/usr/bin/env python # Copyright Iris contributors # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# !/usr/bin/env python """Provides "diff-like" comparison of images. Currently relies on matplotlib for image processing so limited to PNG format. 
diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index 96a0e54f2b..6056eb058b 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -1,8 +1,8 @@ +#!/usr/bin/env python # Copyright Iris contributors # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# !/usr/bin/env python """Updates imagerepo.json based on the baseline images """ diff --git a/lib/iris/tests/integration/test_Datums.py b/lib/iris/tests/integration/test_Datums.py old mode 100755 new mode 100644 diff --git a/lib/iris/tests/unit/analysis/test_MAX_RUN.py b/lib/iris/tests/unit/analysis/test_MAX_RUN.py old mode 100755 new mode 100644 diff --git a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py old mode 100755 new mode 100644 diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py old mode 100755 new mode 100644 diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py old mode 100755 new mode 100644 diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py b/lib/iris/tests/unit/io/test__generate_cubes.py old mode 100755 new mode 100644 diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py old mode 100755 new mode 100644 diff --git a/noxfile.py b/noxfile.py old mode 100755 new mode 100644 From 6f09342b75b238bd03520a8fc9a68cc411f3fbd9 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 18 Dec 2023 14:32:31 +0000 Subject: [PATCH 124/134] ruff ICN rule 
compliance (#5652) --- .ruff.toml | 6 +-- benchmarks/benchmarks/plot.py | 4 +- lib/iris/pandas.py | 22 ++++----- lib/iris/tests/graphics/__init__.py | 8 ++-- lib/iris/tests/unit/pandas/test_pandas.py | 56 +++++++++++------------ 5 files changed, 48 insertions(+), 48 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 6ecdecfd05..f219566316 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -102,9 +102,9 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa "FA", - # flake8-import-conventions (ICN) - # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn - "ICN", + # flake8-implicit-str-concat (ISC) + # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc + "ISC", # flake8-logging-format (G) # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 7942361295..b5bc064e84 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -5,14 +5,14 @@ """Plot benchmark tests. """ -import matplotlib +import matplotlib as mpl import numpy as np from iris import coords, cube, plot from . import ARTIFICIAL_DIM_SIZE -matplotlib.use("agg") +mpl.use("agg") class AuxSort: diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index fa12f087e4..a30c855fc0 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -16,7 +16,7 @@ import cftime import numpy as np import numpy.ma as ma -import pandas +import pandas as pd try: from pandas.core.indexes.datetimes import DatetimeIndex # pandas >=0.20 @@ -44,7 +44,7 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): # Convert out of NumPy's own datetime format. if np.issubdtype(values.dtype, np.datetime64): - values = pandas.to_datetime(values) + values = pd.to_datetime(values) # Convert pandas datetime objects to python datetime objects. 
if isinstance(values, DatetimeIndex): @@ -87,7 +87,7 @@ def _add_iris_coord(cube, name, points, dim, calendar=None): cube.add_aux_coord(coord, dim) -def _series_index_unique(pandas_series: pandas.Series): +def _series_index_unique(pandas_series: pd.Series): """Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. Iterates through grouping single index levels, then combinations of 2 @@ -357,7 +357,7 @@ def as_cubes( cell_measure_cols = cell_measure_cols or [] ancillary_variable_cols = ancillary_variable_cols or [] - is_series = isinstance(pandas_structure, pandas.Series) + is_series = isinstance(pandas_structure, pd.Series) if copy: pandas_structure = pandas_structure.copy() @@ -604,7 +604,7 @@ def as_series(cube, copy=True): index = None if cube.dim_coords: index = _as_pandas_coord(cube.dim_coords[0]) - series = pandas.Series(data, index) + series = pd.Series(data, index) if not copy: _assert_shared(data, series) return series @@ -811,16 +811,16 @@ def merge_metadata(meta_var_list): # dimension over the whole DataFrame data_frame[meta_var_name] = meta_var.squeeze() else: - meta_df = pandas.DataFrame( + meta_df = pd.DataFrame( meta_var.ravel(), columns=[meta_var_name], - index=pandas.MultiIndex.from_product( + index=pd.MultiIndex.from_product( [coords[i] for i in meta_var_index], names=[coord_names[i] for i in meta_var_index], ), ) # Merge to main data frame - data_frame = pandas.merge( + data_frame = pd.merge( data_frame, meta_df, left_index=True, @@ -847,8 +847,8 @@ def merge_metadata(meta_var_list): # Extract dim coord information: separate lists for dim names and dim values coord_names, coords = _make_dim_coord_list(cube) # Make base DataFrame - index = pandas.MultiIndex.from_product(coords, names=coord_names) - data_frame = pandas.DataFrame(data.ravel(), columns=[cube.name()], index=index) + index = pd.MultiIndex.from_product(coords, names=coord_names) + data_frame = pd.DataFrame(data.ravel(), columns=[cube.name()], 
index=index) if add_aux_coords: data_frame = merge_metadata(_make_aux_coord_list(cube)) @@ -889,7 +889,7 @@ def merge_metadata(meta_var_list): if cube.coords(dimensions=[1]): columns = _as_pandas_coord(cube.coord(dimensions=[1])) - data_frame = pandas.DataFrame(data, index, columns) + data_frame = pd.DataFrame(data, index, columns) if not copy: _assert_shared(data, data_frame) diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index ace62248d1..84033b6466 100644 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -26,15 +26,15 @@ # Test for availability of matplotlib. # (And remove matplotlib as an iris.tests dependency.) try: - import matplotlib + import matplotlib as mpl # Override any user settings e.g. from matplotlibrc file. - matplotlib.rcdefaults() + mpl.rcdefaults() # Set backend *after* rcdefaults, as we don't want that overridden (#3846). - matplotlib.use("agg") + mpl.use("agg") # Standardise the figure size across matplotlib versions. # This permits matplotlib png image comparison. - matplotlib.rcParams["figure.figsize"] = [8.0, 6.0] + mpl.rcParams["figure.figsize"] = [8.0, 6.0] import matplotlib.pyplot as plt except ImportError: MPL_AVAILABLE = False diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index c0785459ca..34d1564c1d 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -26,18 +26,18 @@ # used by matplotlib for handling dates. default_units_registry = copy.copy(matplotlib.units.registry) try: - import pandas + import pandas as pd except ImportError: # Disable all these tests if pandas is not installed. 
- pandas = None + pd = None matplotlib.units.registry = default_units_registry skip_pandas = pytest.mark.skipif( - pandas is None, + pd is None, reason='Test(s) require "pandas", ' "which is not available.", ) -if pandas is not None: +if pd is not None: from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube, CubeList import iris.pandas @@ -234,7 +234,7 @@ def test_time_standard(self): days_to_2000 = 365 * 30 + 7 # pandas Timestamp class cannot handle floats in pandas 1: - index = pandas.MultiIndex.from_product(index_values, names=index_names) + index = pd.MultiIndex.from_product(index_values, names=index_names) data_length = index.nunique() else: index = None @@ -806,9 +806,9 @@ def _create_pandas(index_levels=0, is_series=False): data = np.arange(data_length) * 10 if is_series: - class_ = pandas.Series + class_ = pd.Series else: - class_ = pandas.DataFrame + class_ = pd.DataFrame return class_(data, index=index) @@ -1013,13 +1013,13 @@ def test_multi_phenom(self): assert result == [expected_cube_0, expected_cube_1] def test_empty_series(self): - series = pandas.Series(dtype=object) + series = pd.Series(dtype=object) result = iris.pandas.as_cubes(series) assert result == CubeList() def test_empty_dataframe(self): - df = pandas.DataFrame() + df = pd.DataFrame() result = iris.pandas.as_cubes(df) assert result == CubeList() @@ -1165,7 +1165,7 @@ def _test_dates_common(self, mode=None, alt_calendar=False): datetime_args = [(1971, 1, 1, i, 0, 0) for i in df.index.values] if mode == "index": values = [datetime.datetime(*a) for a in datetime_args] - df.index = pandas.Index(values, name=coord_name) + df.index = pd.Index(values, name=coord_name) elif mode == "numpy": values = [datetime.datetime(*a) for a in datetime_args] df[coord_name] = values From 67be8017bfb4bc050a5699d86707af7130bb63df Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 18 Dec 2023 14:38:06 +0000 Subject: [PATCH 125/134] ruff NPY rule compliance (#5653) 
--- .ruff.toml | 6 +++--- lib/iris/fileformats/pp.py | 2 +- lib/iris/tests/integration/netcdf/test_delayed_save.py | 2 +- lib/iris/tests/integration/test_regridding.py | 5 +++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index f219566316..def056b85f 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -198,9 +198,9 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flynt-fly "FLY", - # NumPy-specific rules (NPY) - # https://docs.astral.sh/ruff/rules/#numpy-specific-rules-npy - "NPY", + # Airflow (AIR) + # https://docs.astral.sh/ruff/rules/#airflow-air + "AIR", # Perflint (PERF) # https://docs.astral.sh/ruff/rules/#perflint-perf diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index f6eb3d7168..042ba6f2bc 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1217,7 +1217,7 @@ def save(self, file_handle): ia //= PP_WORD_DEPTH else: # ia is the datalength in WORDS - ia = np.product(extra_elem.shape) + ia = np.prod(extra_elem.shape) # flip the byteorder if the data is not big-endian if extra_elem.dtype.newbyteorder(">") != extra_elem.dtype: # take a copy of the extra data when byte swapping diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index 06d04e957d..d76d57c3e1 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -69,7 +69,7 @@ def fix_array(array): dmin, dmax = 0, 255 else: dmin, dmax = array.min(), array.max() - array = np.random.uniform(dmin, dmax, size=array.shape) + array = np.random.default_rng().uniform(dmin, dmax, size=array.shape) if data_is_maskedbytes: array = array.astype("u1") diff --git a/lib/iris/tests/integration/test_regridding.py b/lib/iris/tests/integration/test_regridding.py index 833c059053..c8197a9d94 100644 --- a/lib/iris/tests/integration/test_regridding.py +++ 
b/lib/iris/tests/integration/test_regridding.py @@ -110,8 +110,9 @@ def test_nearest(self): class TestZonalMean_global(tests.IrisTest): def setUp(self): - np.random.seed(0) - self.src = iris.cube.Cube(np.random.randint(0, 10, size=(140, 1))) + self.src = iris.cube.Cube( + np.random.default_rng().integers(0, 10, size=(140, 1)) + ) s_crs = iris.coord_systems.GeogCS(6371229.0) sy_coord = iris.coords.DimCoord( np.linspace(-90, 90, 140), From 332a77911f2c349273916a2582ee7b6bafd74bff Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 18 Dec 2023 16:58:45 +0000 Subject: [PATCH 126/134] fix for ruff rule D400 (#5647) * fixed for ruff rule D400. * additional fixes. * update testing * noqa for the gallery. * merge fix * fix a ",." in docstring * removed D400 ignore --- .ruff.toml | 1 - benchmarks/benchmarks/aux_factory.py | 2 +- benchmarks/benchmarks/coords.py | 2 +- benchmarks/benchmarks/cube.py | 2 +- benchmarks/benchmarks/import_iris.py | 4 +++- benchmarks/benchmarks/regridding.py | 2 +- benchmarks/benchmarks/trajectory.py | 2 +- docs/gallery_code/general/plot_SOI_filtering.py | 2 +- .../general/plot_anomaly_log_colouring.py | 2 +- docs/gallery_code/general/plot_coriolis.py | 2 +- docs/gallery_code/general/plot_cross_section.py | 2 +- .../general/plot_custom_aggregation.py | 2 +- .../general/plot_custom_file_loading.py | 2 +- docs/gallery_code/general/plot_global_map.py | 2 +- docs/gallery_code/general/plot_inset.py | 2 +- .../general/plot_lineplot_with_legend.py | 2 +- docs/gallery_code/general/plot_polar_stereo.py | 2 +- docs/gallery_code/general/plot_polynomial_fit.py | 2 +- .../general/plot_projections_and_annotations.py | 2 +- .../general/plot_rotated_pole_mapping.py | 2 +- docs/gallery_code/general/plot_zonal_means.py | 2 +- docs/gallery_code/meteorology/plot_COP_1d.py | 2 +- docs/gallery_code/meteorology/plot_COP_maps.py | 2 +- docs/gallery_code/meteorology/plot_TEC.py | 2 +- .../meteorology/plot_deriving_phenomena.py | 2 
+- docs/gallery_code/meteorology/plot_hovmoller.py | 2 +- .../meteorology/plot_lagged_ensemble.py | 2 +- docs/gallery_code/meteorology/plot_wind_barbs.py | 2 +- docs/gallery_code/meteorology/plot_wind_speed.py | 2 +- .../oceanography/plot_atlantic_profiles.py | 2 +- docs/gallery_code/oceanography/plot_load_nemo.py | 2 +- .../oceanography/plot_orca_projection.py | 2 +- .../documenting/docstrings_attribute.py | 2 +- lib/iris/analysis/_regrid.py | 1 + lib/iris/analysis/_scipy_interpolate.py | 4 ++-- lib/iris/analysis/geometry.py | 2 +- lib/iris/common/metadata.py | 2 +- lib/iris/coord_systems.py | 2 +- lib/iris/coords.py | 10 ++++++---- lib/iris/cube.py | 6 +++--- lib/iris/experimental/animate.py | 2 +- lib/iris/experimental/representation.py | 2 +- lib/iris/experimental/ugrid/__init__.py | 2 +- lib/iris/experimental/ugrid/mesh.py | 4 ++-- .../_structured_array_identification.py | 2 +- lib/iris/fileformats/name_loaders.py | 2 +- lib/iris/fileformats/nimrod.py | 2 +- lib/iris/fileformats/pp.py | 9 +++++---- lib/iris/fileformats/pp_save_rules.py | 2 +- lib/iris/fileformats/um/_fast_load.py | 4 +++- .../um/_fast_load_structured_fields.py | 4 +++- lib/iris/pandas.py | 2 +- lib/iris/plot.py | 2 +- lib/iris/tests/graphics/__init__.py | 2 +- lib/iris/tests/graphics/idiff.py | 2 +- lib/iris/tests/graphics/recreate_imagerepo.py | 2 +- lib/iris/tests/integration/plot/test_colorbar.py | 2 +- lib/iris/tests/stock/__init__.py | 2 +- lib/iris/tests/test_coding_standards.py | 2 +- lib/iris/tests/test_coordsystem.py | 4 ++-- lib/iris/tests/test_cube_to_pp.py | 4 ++-- lib/iris/tests/test_file_save.py | 16 ++++++++-------- lib/iris/tests/test_intersect.py | 2 +- lib/iris/tests/test_netcdf.py | 4 ++-- lib/iris/tests/test_plot.py | 4 ++-- lib/iris/tests/test_std_names.py | 2 +- lib/iris/tests/test_util.py | 2 +- .../cartography/test__get_lon_lat_coords.py | 2 +- .../analysis/cartography/test__quadrant_area.py | 2 +- .../unit/analysis/cartography/test__xy_range.py | 2 +- 
.../analysis/cartography/test_area_weights.py | 2 +- .../test_RectilinearInterpolator.py | 2 +- .../unit/common/lenient/test__lenient_client.py | 8 ++++---- .../unit/common/lenient/test__lenient_service.py | 8 ++++---- lib/iris/tests/unit/coords/test_Cell.py | 2 +- lib/iris/tests/unit/cube/test_Cube.py | 10 +++++++--- .../tests/unit/cube/test_Cube__aggregated_by.py | 2 +- .../nc_load_rules/helpers/test_get_names.py | 2 +- lib/iris/tests/unit/fileformats/test_rules.py | 2 +- lib/iris/tests/unit/pandas/test_pandas.py | 6 +++--- lib/iris/tests/unit/plot/test_plot.py | 4 ++-- lib/iris/tests/unit/util/test__coord_regular.py | 1 + lib/iris/tests/unit/util/test__mask_array.py | 2 +- .../tests/unit/util/test_equalise_attributes.py | 2 +- .../tests/unit/util/test_find_discontiguities.py | 2 +- lib/iris/tests/unit/util/test_mask_cube.py | 2 +- lib/iris/util.py | 2 +- 87 files changed, 131 insertions(+), 117 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index def056b85f..6eff54c7de 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -42,7 +42,6 @@ lint.ignore = [ "D211", # No blank lines allowed before class docstring "D214", # Section is over-indented "D300", # triple double quotes `""" / Use triple single quotes `'''` - "D400", # First line should end with a period "D401", # First line of docstring should be in imperative mood: ... 
"D403", # First word of the first line should be capitalized "D404", # First word of the docstring should not be "This" diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index 49dc6a368c..aed50c2854 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -29,7 +29,7 @@ class FactoryCommon: """ def setup(self): - """Prevent ASV instantiating (must therefore override setup() in any subclasses.)""" + """Prevent ASV instantiating (must therefore override setup() in any subclasses.).""" raise NotImplementedError def setup_common(self): diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index 1c8f49967a..930cddb4f1 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -35,7 +35,7 @@ class CoordCommon: """ def setup(self): - """Prevent ASV instantiating (must therefore override setup() in any subclasses.)""" + """Prevent ASV instantiating (must therefore override setup() in any subclasses.).""" raise NotImplementedError def setup_common(self): diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index f11e135996..8fcea35499 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -42,7 +42,7 @@ class ComponentCommon: """ def setup(self): - """Prevent ASV instantiating (must therefore override setup() in any subclasses.)""" + """Prevent ASV instantiating (must therefore override setup() in any subclasses.).""" raise NotImplementedError def create(self): diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 37c98bee09..fbae82fee6 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -29,7 +29,9 @@ class Iris: @staticmethod def _import(module_name, reset_colormaps=False): """Have experimented with adding sleep() commands into the imported - modules. The results reveal: + modules. 
+ + The results reveal: ASV avoids invoking `import x` if nothing gets called in the benchmark (some imports were timed, but only those where calls diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 1f8f2e3740..8e1c5e33df 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Regridding benchmark test +"""Regridding benchmark test. """ diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py index 79466d3804..4214ed3f6e 100644 --- a/benchmarks/benchmarks/trajectory.py +++ b/benchmarks/benchmarks/trajectory.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Trajectory benchmark test +"""Trajectory benchmark test. """ diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 372369d450..9c4fad51bd 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -18,7 +18,7 @@ Trenberth K. E. (1984) Signal Versus Noise in the Southern Oscillation. Monthly Weather Review, Vol 112, pp 326-332 -""" +""" # noqa: D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index af15868387..98751b0dbf 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -22,7 +22,7 @@ and :obj:`matplotlib.pyplot.pcolormesh`). See also: https://en.wikipedia.org/wiki/False_color#Pseudocolor. 
-""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.colors as mcols diff --git a/docs/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py index 37be139dd3..3d5aa853e3 100644 --- a/docs/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -5,7 +5,7 @@ associated metadata. It then plots the Coriolis frequency on an orthographic projection. -""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index f4fc0a2ecc..8eac3955b1 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -4,7 +4,7 @@ This example demonstrates contour plots of a cross-sectioned multi-dimensional cube which features a hybrid height vertical coordinate system. -""" +""" # noqa: D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index e42144d777..521d0069b9 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -11,7 +11,7 @@ over North America, and we want to calculate in how many years these exceed a certain temperature over a spell of 5 years or more. -""" +""" # noqa: D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index d76ea877b2..53781ba044 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -52,7 +52,7 @@ function which automatically invokes the ``FormatSpecification`` we defined. The cube returned from the load function is then used to produce a plot. 
-""" +""" # noqa: D400 import datetime diff --git a/docs/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py index dbf05d773d..9634fc1458 100644 --- a/docs/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -4,7 +4,7 @@ This example demonstrates a contour plot of global air temperature. The plot title and the labels for the axes are automatically derived from the metadata. -""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py index b173fb2044..7b7e04c7d3 100644 --- a/docs/gallery_code/general/plot_inset.py +++ b/docs/gallery_code/general/plot_inset.py @@ -5,7 +5,7 @@ and longitude dimensions to plot a temperature series for a single latitude coordinate, with an inset plot of the data region. -""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index 81b89c8d55..981e9694ec 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -1,7 +1,7 @@ """Multi-Line Temperature Profile Plot ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -""" +""" # noqa: D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py index 27201018d4..0cb4e533fa 100644 --- a/docs/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -4,7 +4,7 @@ Demonstrates plotting data that are defined on a polar stereographic projection. 
-""" +""" # noqa: D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 10e844af1a..951f17209e 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -5,7 +5,7 @@ cube, adding the fit to the cube's metadata, and plotting both the 1D data and the fit. -""" +""" # noqa: D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 7ddee9531c..1a4701837a 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -11,7 +11,7 @@ We plot these over a specified region, in two different map projections. -""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index 0233ade6a7..3674e89e28 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -9,7 +9,7 @@ * Block plot of contiguous bounded data * Non native projection and a Natural Earth shaded relief image underlay -""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py index 3f6e043547..47a7127d06 100644 --- a/docs/gallery_code/general/plot_zonal_means.py +++ b/docs/gallery_code/general/plot_zonal_means.py @@ -1,7 +1,7 @@ """Zonal Mean Diagram of Air Temperature ===================================== This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. 
-""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 6cdbd50114..1f56a7b293 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -26,7 +26,7 @@ Further details on the aggregation functionality being used in this example can be found in :ref:`cube-statistics`. -""" +""" # noqa: D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 108407c09e..714ee8896b 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -19,7 +19,7 @@ Analyses, and Scenarios. Eos Trans. AGU, Vol 90, No. 21, doi:10.1029/2009EO210001. -""" +""" # noqa: D400 import os.path diff --git a/docs/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py index 50619ca870..cb642af588 100644 --- a/docs/gallery_code/meteorology/plot_TEC.py +++ b/docs/gallery_code/meteorology/plot_TEC.py @@ -8,7 +8,7 @@ The plot exhibits an interesting outline effect due to excluding data values below a certain threshold. -""" +""" # noqa: D400 import matplotlib.pyplot as plt import numpy.ma as ma diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index bd9775e657..ef78d2f1c9 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -7,7 +7,7 @@ specific humidity. Finally, the two new cubes are presented side-by-side in a plot. 
-""" +""" # noqa: D400 import matplotlib.pyplot as plt import matplotlib.ticker diff --git a/docs/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py index 2e76be98d6..6c1f1a800a 100644 --- a/docs/gallery_code/meteorology/plot_hovmoller.py +++ b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -6,7 +6,7 @@ and has been pre-processed to calculate the monthly mean sea surface temperature. -""" +""" # noqa: D400 import matplotlib.dates as mdates import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index 32798c124f..a8887238d4 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -15,7 +15,7 @@ better approach would be to take the climatological mean, calibrated to the model, from each ensemble member. -""" +""" # noqa: D400 import matplotlib.pyplot as plt import matplotlib.ticker diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index 4f776144e6..9745a40db2 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -8,7 +8,7 @@ The magnitude of the wind in the original data is low and so doesn't illustrate the full range of barbs. The wind is scaled to simulate a storm that better illustrates the range of barbs that are available. -""" +""" # noqa: D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index a1820e980e..dde87824fd 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -8,7 +8,7 @@ For the second plot, the data used for the arrows is normalised to produce arrows with a uniform size on the plot. 
-""" +""" # noqa: D400 import cartopy.feature as cfeat import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index 3886c61aff..736ddbe7fb 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -13,7 +13,7 @@ presence of the attribute positive=down on the depth coordinate. This means depth values intuitively increase downward on the y-axis. -""" +""" # noqa: D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index 52fbb33484..77c95e2353 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -5,7 +5,7 @@ the NEMO model and combine them into a time series in a single cube. The different time dimensions in these files can prevent Iris from concatenating them without the intervention shown here. -""" +""" # noqa: D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index d9968de0ae..33e3ecac46 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -9,7 +9,7 @@ Second four pcolormesh plots are created from this projected dataset, using different projections for the output image. 
-""" +""" # noqa: D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/src/developers_guide/documenting/docstrings_attribute.py b/docs/src/developers_guide/documenting/docstrings_attribute.py index 18bb0c9c6f..9485ca5af7 100644 --- a/docs/src/developers_guide/documenting/docstrings_attribute.py +++ b/docs/src/developers_guide/documenting/docstrings_attribute.py @@ -1,5 +1,5 @@ class ExampleClass: - """Class Summary""" + """Class Summary.""" def __init__(self, arg1, arg2): """Purpose section description. diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 61b4d31204..3e9074fc6b 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -969,6 +969,7 @@ def _create_cube(data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback) - Horizontal coordinates are copied from the target cube. - Auxiliary coordinates which span the grid dimensions are ignored. + Parameters ---------- data : array diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index 1300da6d89..bf1796f91e 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -41,7 +41,7 @@ def _ndim_coords_from_arrays(points, ndim=None): # 9aeaafb32/scipy/interpolate/interpolate.py#L1400 class _RegularGridInterpolator: - """Interpolation on a regular grid in arbitrary dimensions + """Interpolation on a regular grid in arbitrary dimensions. The data must be defined on a regular grid; the grid spacing however may be uneven. Linear and nearest-neighbour interpolation are supported. After @@ -139,7 +139,7 @@ def __init__( self.values = values def __call__(self, xi, method=None): - """Interpolation at coordinates + """Interpolation at coordinates. 
Parameters ---------- diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index 21c7d05943..a07ef61a76 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -21,7 +21,7 @@ def _extract_relevant_cube_slice(cube, geometry): """Given a shapely geometry object, this helper method returns the tuple (subcube, x_coord_of_subcube, y_coord_of_subcube, - (min_x_index, min_y_index, max_x_index, max_y_index)) + (min_x_index, min_y_index, max_x_index, max_y_index)). If cube and geometry don't overlap, returns None. diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 3b2e909e9e..691e427aa5 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -1229,7 +1229,7 @@ def _difference_strict_attributes(left, right): class DimCoordMetadata(CoordMetadata): - """Metadata container for a :class:`~iris.coords.DimCoord`""" + """Metadata container for a :class:`~iris.coords.DimCoord`.""" # The "circular" member is stateful only, and does not participate # in lenient/strict equivalence. diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 0ecfae0fb7..60124044f1 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -53,7 +53,7 @@ class CoordSystem(metaclass=ABCMeta): grid_mapping_name = None def __eq__(self, other): - """Override equality + """Override equality. The `_globe` and `_crs` attributes are not compared because they are cached properties and completely derived from other attributes. The diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 08bb5ee7d9..9336f1f4c7 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -630,7 +630,7 @@ def __hash__(self): return hash(id(self)) def __binary_operator__(self, other, mode_constant): - """Common code which is called by add, sub, mul and div + """Common code which is called by add, sub, mul and div. 
Mode constant is one of ADD, SUB, MUL, DIV, RDIV @@ -2470,7 +2470,7 @@ def xml_element(self, doc): return element def _xml_id_extra(self, unique_value): - """Coord specific stuff for the xml id""" + """Coord specific stuff for the xml id.""" unique_value += str(self.coord_system).encode("utf-8") + b"\0" return unique_value @@ -2934,7 +2934,9 @@ class CellMethod(iris.util._OrderedHashable): comments = None def __init__(self, method, coords=None, intervals=None, comments=None): - """Args: + """Call Method initialise. + + Args: * method: The name of the operation. @@ -2993,7 +2995,7 @@ def __init__(self, method, coords=None, intervals=None, comments=None): self._init(method, tuple(_coords), tuple(_intervals), tuple(_comments)) def __str__(self): - """Return a custom string representation of CellMethod""" + """Return a custom string representation of CellMethod.""" # Group related coord names intervals and comments together coord_string = " ".join([f"{coord}:" for coord in self.coord_names]) method_string = str(self.method) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index aff9ad5d0d..b6d2f7fd7e 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -188,14 +188,14 @@ def __add__(self, other): return CubeList(list.__add__(self, other)) def __getitem__(self, keys): - """x.__getitem__(y) <==> x[y]""" + """x.__getitem__(y) <==> x[y].""" result = super().__getitem__(keys) if isinstance(result, list): result = CubeList(result) return result def __getslice__(self, start, stop): - """x.__getslice__(i, j) <==> x[i:j] + """x.__getslice__(i, j) <==> x[i:j]. Use of negative indices is not supported. @@ -209,7 +209,7 @@ def __iadd__(self, other_cubes): return super(CubeList, self).__iadd__(CubeList(other_cubes)) def __setitem__(self, key, cube_or_sequence): - """Set self[key] to cube or sequence of cubes""" + """Set self[key] to cube or sequence of cubes.""" if isinstance(key, int): # should have single cube. 
self._assert_is_cube(cube_or_sequence) diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index b660ae2a1a..5c9fa77bf8 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Wrapper for animating iris cubes using iris or matplotlib plotting functions +"""Wrapper for animating iris cubes using iris or matplotlib plotting functions. Notes ----- diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 9acd926eb2..fd063a5475 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -202,7 +202,7 @@ def _make_shapes_row(self): def _make_row(self, title, body=None, col_span=0): """Produce one row for the table body; i.e. - Coord namex-... + Coord namex-.... `body` contains the content for each cell not in the left-most (title) column. diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py index 58695912c6..7cae55a1bd 100644 --- a/lib/iris/experimental/ugrid/__init__.py +++ b/lib/iris/experimental/ugrid/__init__.py @@ -4,7 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Infra-structure for unstructured mesh support, based on -CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/ +CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. .. 
note:: diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 5d320384b7..98fd08d643 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -1953,7 +1953,7 @@ def topology_dimension(self): class _Mesh1DCoordinateManager: """TBD: require clarity on coord_systems validation TBD: require clarity on __eq__ support - TBD: rationalise self.coords() logic with other manager and Cube + TBD: rationalise self.coords() logic with other manager and Cube. """ @@ -2141,7 +2141,7 @@ def _add(self, coords): def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): """use self.remove(edge_x=True) to remove a coordinate e.g., using the - pattern self.add(edge_x=None) will not remove the edge_x coordinate + pattern self.add(edge_x=None) will not remove the edge_x coordinate. """ self._add(MeshNodeCoords(node_x, node_y)) diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index e9f8d36324..05bd04036b 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -291,7 +291,7 @@ class GroupStructure: """ def __init__(self, length, component_structure, array_order="c"): - """group_component_to_array - a dictionary. See also TODO""" + """group_component_to_array - a dictionary. See also TODO.""" #: The size common to all of the original arrays and used to determine #: possible shape configurations. self.length = length diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 8e1a9f5f9d..e6acb6aa66 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -211,7 +211,7 @@ def _calc_integration_period(time_avgs): def _parse_units(units): - """Return a known :class:`cf_units.Unit` given a NAME unit + """Return a known :class:`cf_units.Unit` given a NAME unit. .. 
note:: diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index b070b80d69..3aea8b8b81 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -240,7 +240,7 @@ def _read_header(self, infile): ) def _read_data(self, infile): - """Read the data array: int8, int16, int32 or float32 + """Read the data array: int8, int16, int32 or float32. (surrounded by 4-byte length, at start and end) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 042ba6f2bc..a654bfde6f 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -973,7 +973,7 @@ def __repr__(self): @property def stash(self): """Stash property giving access to the associated STASH object, - now supporting __eq__ + now supporting __eq__. """ if ( @@ -1052,7 +1052,7 @@ def lbproc(self, value): @property def data(self): """:class:`numpy.ndarray` representing the multidimensional data - of the pp file + of the pp file. """ if is_lazy_data(self._data): @@ -1126,7 +1126,8 @@ def y_bounds(self): def save(self, file_handle): """Save the PPField to the given file object. - (typically created with :func:`open`):: + + Typically created with :func:`open`:: # to append the field to a file with open(filename, 'ab') as fh: @@ -1976,7 +1977,7 @@ def _make_func(stashobj): def pp_filter(field): """Return True if field is to be kept, - False if field does not match filter + False if field does not match filter. """ res = True diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 376c4a3632..60eef7ad96 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -815,7 +815,7 @@ def _vertical_rules(cube, pp): def _all_other_rules(cube, pp): - """Fields currently managed by these rules: + """Fields currently managed by these rules. 
* lbfc (field code) * lbrsvd[3] (ensemble member number) diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index 6ab4f20374..477a221727 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -47,7 +47,9 @@ class FieldCollation(BasicFieldCollation): # class, now renamed 'BasicFieldCollation'. def __init__(self, fields, filepath): - """Args: + """FieldCollation initialise. + + Args: * fields (iterable of :class:`iris.fileformats.pp.PPField`): The fields in the collation. diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 26f2816891..41ec8720bc 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -45,7 +45,9 @@ class BasicFieldCollation: """ def __init__(self, fields): - """Args: + """BasicFieldCollation initialise. + + Args: * fields (iterable of :class:`iris.fileformats.pp.PPField`): The fields in the collation. diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index a30c855fc0..ed33990f5c 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -803,7 +803,7 @@ def as_data_frame( """ def merge_metadata(meta_var_list): - """Add auxiliary cube metadata to the DataFrame""" + """Add auxiliary cube metadata to the DataFrame.""" nonlocal data_frame for meta_var_name, meta_var_index, meta_var in meta_var_list: if not meta_var_index: diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 667af06af1..b4e20c57af 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -255,7 +255,7 @@ def ticker_func(tick_location, _): def _invert_yaxis(v_coord, axes=None): - """Inverts the y-axis of the current plot based on conditions: + """Inverts the y-axis of the current plot based on conditions. * If the y-axis is already inverted we don't want to re-invert it. * If v_coord is None then it will not have any attributes. 
diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 84033b6466..a1b6b24bcc 100644 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Contains Iris graphic testing utilities +"""Contains Iris graphic testing utilities. By default, this module sets the matplotlib backend to "agg". But when this module is imported it checks ``sys.argv`` for the flag "-d". If diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 41cf20964f..2e2ef75776 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -35,7 +35,7 @@ def extract_test_key(result_image_name): - """Extracts the name of the test which a result image refers to""" + """Extracts the name of the test which a result image refers to.""" name_match = _RESULT_NAME_PATTERN.match(str(result_image_name)) if name_match: test_key = name_match.group(1) diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index 6056eb058b..ae12eb447d 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -3,7 +3,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Updates imagerepo.json based on the baseline images +"""Updates imagerepo.json based on the baseline images. """ diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index 9aa856934c..4a3fd27a80 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. """Test interaction between :mod:`iris.plot` and -:func:`matplotlib.pyplot.colorbar` +:func:`matplotlib.pyplot.colorbar`. """ diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index ca5adb21fc..894cc1d02c 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -650,7 +650,7 @@ def realistic_4d(): def realistic_4d_no_derived(): - """Returns a realistic 4d cube without hybrid height + """Returns a realistic 4d cube without hybrid height. >>> print(repr(realistic_4d())) Date: Mon, 18 Dec 2023 21:33:02 +0000 Subject: [PATCH 127/134] fix for ruff rule D214. (#5654) --- .ruff.toml | 1 - lib/iris/common/resolve.py | 6 +++--- lib/iris/cube.py | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 6eff54c7de..38dcd2af3f 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -40,7 +40,6 @@ lint.ignore = [ "D208", # Docstring is over-indented "D209", # Multi-line docstring closing quotes should be on a separate line "D211", # No blank lines allowed before class docstring - "D214", # Section is over-indented "D300", # triple double quotes `""" / Use triple single quotes `'''` "D401", # First line of docstring should be in imperative mood: ... 
"D403", # First word of the first line should be capitalized diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 045dc7b549..9124483dac 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -205,7 +205,7 @@ class Resolve: >>> resolver = Resolve(cube1, cube2) >>> results = [resolver.cube(data) for data in payload] - """ + """ # noqa: D214 def __init__(self, lhs=None, rhs=None): """Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and @@ -2493,7 +2493,7 @@ def mapped(self): >>> resolver.map_rhs_to_lhs False - """ + """ # noqa: D214 result = None if self.mapping is not None: result = self._src_cube.ndim == len(self.mapping) @@ -2554,5 +2554,5 @@ def shape(self): >>> Resolve(cube2, cube1).shape (240, 37, 49) - """ + """ # noqa: D214 return self._broadcast_shape diff --git a/lib/iris/cube.py b/lib/iris/cube.py index b6d2f7fd7e..cb9fc02520 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -4521,7 +4521,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Notice that the forecast_period dimension now represents the 4 possible windows of size 3 from the original cube. 
- """ + """ # noqa: D214 # Update weights kwargs (if necessary) to handle different types of # weights weights_info = None From 4bf0ea4e5d4982fa9829fabb9a93da5ddd715d9c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 21:47:04 +0000 Subject: [PATCH 128/134] [pre-commit.ci] pre-commit autoupdate (#5655) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.7 → v0.1.8](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.7...v0.1.8) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b8289b9583..d4fc3eedd9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.1.7" + rev: "v0.1.8" hooks: - id: ruff types: [file, python] From 203d6d20a26400231f8dd8130ab48fc1bb9c7708 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 19 Dec 2023 16:51:33 +0000 Subject: [PATCH 129/134] ruff rule D208 compliance. 
(#5656) --- .ruff.toml | 1 - .../unit/analysis/geometry/test_geometry_area_weights.py | 5 +---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 38dcd2af3f..96aedee6c6 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -37,7 +37,6 @@ lint.ignore = [ "D200", # One-line docstring should fit on one line "D202", # No blank lines allowed after function docstring "D205", # 1 blank line required between summary line and description - "D208", # Docstring is over-indented "D209", # Multi-line docstring closing quotes should be on a separate line "D211", # No blank lines allowed before class docstring "D300", # triple double quotes `""" / Use triple single quotes `'''` diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index 29b3ba8c7e..5d7d39dfc4 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.analysis.geometry.geometry_area_weights` -function. - - """ +"""Unit tests for the :func:`iris.analysis.geometry.geometry_area_weights` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. From 52fd4ab0e39bbedbf24c8484231092a1f4f0b9a1 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 19 Dec 2023 16:53:25 +0000 Subject: [PATCH 130/134] ruff rule D411 compliance. (#5657) * ruff rule D411 complliance. 
* removed comment * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .ruff.toml | 1 - lib/iris/analysis/_regrid.py | 1 + lib/iris/common/resolve.py | 6 +++--- lib/iris/cube.py | 2 +- lib/iris/fileformats/dot.py | 12 ++++++------ 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 96aedee6c6..8d65204a28 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -48,7 +48,6 @@ lint.ignore = [ "D407", # Missing dashed underline after section "D409", # Section underline should match the length of its name "D410", # Missing blank line after section - "D411", # Missing blank line before section "D412", # No blank lines allowed between a section header and its content # pyupgrade (UP) diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 3e9074fc6b..b2ce99ee7a 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -988,6 +988,7 @@ def _create_cube(data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback) regrid_callback : callable The routine that will be used to calculate the interpolated values of any reference surfaces. 
+ Returns ------- cube diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 9124483dac..ec73792915 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -205,7 +205,7 @@ class Resolve: >>> resolver = Resolve(cube1, cube2) >>> results = [resolver.cube(data) for data in payload] - """ # noqa: D214 + """ # noqa: D214, D411 def __init__(self, lhs=None, rhs=None): """Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and @@ -2493,7 +2493,7 @@ def mapped(self): >>> resolver.map_rhs_to_lhs False - """ # noqa: D214 + """ # noqa: D214, D411 result = None if self.mapping is not None: result = self._src_cube.ndim == len(self.mapping) @@ -2554,5 +2554,5 @@ def shape(self): >>> Resolve(cube2, cube1).shape (240, 37, 49) - """ # noqa: D214 + """ # noqa: D214, D411 return self._broadcast_shape diff --git a/lib/iris/cube.py b/lib/iris/cube.py index cb9fc02520..44ca2777bb 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -4521,7 +4521,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Notice that the forecast_period dimension now represents the 4 possible windows of size 3 from the original cube. - """ # noqa: D214 + """ # noqa: D214, D411 # Update weights kwargs (if necessary) to handle different types of # weights weights_info = None diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 6175dd553f..ff0e26bf84 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -337,16 +337,16 @@ def _coord_system_text(cs, uid): def _dot_node(indent, id, name, attributes): """Return a string containing the dot representation for a single node. - Args - ---- - id + Parameters + ---------- + id : The ID of the node. - name + name : The visual name of the node. - attributes + attributes: An iterable of (name, value) attribute pairs. 
- """ + """ # noqa: D411 attributes = r"\n".join("%s: %s" % item for item in attributes) template = """%(indent)s"%(id)s" [ %(indent)s label = "%(name)s|%(attributes)s" From 9ab06372cad5f1dd3e235c63f4c4032e1267f8b0 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Wed, 20 Dec 2023 13:10:15 +0000 Subject: [PATCH 131/134] ruff rule D412 compliance. (#5660) * ruff rule D412 complliance. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update _optimal_array_structuring.py remove trailing whitespace. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .ruff.toml | 1 - .../general/plot_SOI_filtering.py | 1 - docs/gallery_code/meteorology/plot_COP_1d.py | 1 - .../gallery_code/meteorology/plot_COP_maps.py | 1 - lib/iris/analysis/_grid_angles.py | 67 ++++++++++--------- lib/iris/analysis/_interpolation.py | 7 +- lib/iris/analysis/cartography.py | 4 +- lib/iris/analysis/maths.py | 8 --- lib/iris/coord_systems.py | 2 - lib/iris/cube.py | 30 ++++----- .../fileformats/_nc_load_rules/helpers.py | 15 ++--- lib/iris/fileformats/cf.py | 5 +- lib/iris/fileformats/pp_load_rules.py | 19 +++--- .../um/_optimal_array_structuring.py | 6 +- lib/iris/util.py | 34 +++++----- 15 files changed, 90 insertions(+), 111 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 8d65204a28..f9e3770410 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -48,7 +48,6 @@ lint.ignore = [ "D407", # Missing dashed underline after section "D409", # Section underline should match the length of its name "D410", # Missing blank line after section - "D412", # No blank lines allowed between a section header and its content # pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 9c4fad51bd..5082162068 100644 --- 
a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -11,7 +11,6 @@ References ---------- - Duchon C. E. (1979) Lanczos Filtering in One and Two Dimensions. Journal of Applied Meteorology, Vol 18, pp 1016-1022. diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 1f56a7b293..d5ef2a9990 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -11,7 +11,6 @@ References ---------- - Johns T.C., et al. (2011) Climate change under aggressive mitigation: the ENSEMBLES multi-model experiment. Climate Dynamics, Vol 37, No. 9-10, doi:10.1007/s00382-011-1005-5. diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 714ee8896b..eda8a3a53a 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -9,7 +9,6 @@ References ---------- - Johns T.C., et al. (2011) Climate change under aggressive mitigation: the ENSEMBLES multi-model experiment. Climate Dynamics, Vol 37, No. 9-10, doi:10.1007/s00382-011-1005-5. diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 6a0ba3e1a4..d50f55125f 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -22,9 +22,9 @@ def _3d_xyz_from_latlon(lon, lat): Arrays of longitudes and latitudes, in degrees. Both the same shape. - Returns: - - * xyz : (array, dtype=float64) + Returns + ------- + xyz : array, dtype=float64 Cartesian coordinates on a unit sphere. Shape is (3, ). The x / y / z coordinates are in xyz[0 / 1 / 2]. @@ -52,9 +52,9 @@ def _latlon_from_xyz(xyz): Shape (3, ). x / y / z values are in xyz[0 / 1 / 2], - Returns: - - * lonlat : (array) + Returns + ------- + lonlat : array longitude and latitude position angles, in degrees. Shape (2, ). The longitudes / latitudes are in lonlat[0 / 1]. 
@@ -104,9 +104,9 @@ def _angle(p, q, r): Shape is (2, ). Longitudes / latitudes are in array[0 / 1]. - Returns: - - * angle : (float array) + Returns + ------- + angle : float array Grid angles relative to true-East, in degrees. Positive when grid-East is anticlockwise from true-East. Shape is same as . @@ -178,23 +178,22 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): takes an angles between the average of the left-hand and right-hand pairs of corners. The default is 'mid-lhs, mid-rhs'. - Returns: - - angles : (2-dimensional cube) - - Cube of angles of grid-x vector from true Eastward direction for - each gridcell, in degrees. - It also has "true" longitude and latitude coordinates, with no - coordinate system. - When the input has coords, then the output ones are identical if - the inputs are true-latlons, otherwise they are transformed - true-latlon versions. - When the input has bounded coords, then the output coords have - matching bounds and centrepoints (possibly transformed). - When the input is 2d arrays, or has unbounded coords, then the - output coords have matching points and no bounds. - When the input is 3d arrays, then the output coords have matching - bounds, and the centrepoints are an average of the 4 boundpoints. + Returns + ------- + angles : 2-dimensional cube + Cube of angles of grid-x vector from true Eastward direction for + each gridcell, in degrees. + It also has "true" longitude and latitude coordinates, with no + coordinate system. + When the input has coords, then the output ones are identical if + the inputs are true-latlons, otherwise they are transformed + true-latlon versions. + When the input has bounded coords, then the output coords have + matching bounds and centrepoints (possibly transformed). + When the input is 2d arrays, or has unbounded coords, then the + output coords have matching points and no bounds. 
+ When the input is 3d arrays, then the output coords have matching + bounds, and the centrepoints are an average of the 4 boundpoints. """ cube = None @@ -423,15 +422,17 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg Additional keyword args to be passed to the :func:`gridcell_angles` method, if it is used. - Returns: - - true_u, true_v : (cube) - Cubes of true-north oriented vector components. - Units are same as inputs. + Returns + ------- + true_u, true_v : cube + Cubes of true-north oriented vector components. + Units are same as inputs. - .. Note:: + Notes + ----- + .. note:: - Vector magnitudes will always be the same as the inputs. + Vector magnitudes will always be the same as the inputs. .. note:: diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 7c28d24efa..b6e443c95c 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -296,9 +296,10 @@ def _interpolate(self, data, interp_points): The other (leading) dimensions index over the different required sample points. - Returns: - - A :class:`np.ndarray`. Its shape is "points_shape + extra_shape", + Returns + ------- + :class:`np.ndarray`. + Its shape is "points_shape + extra_shape", where "extra_shape" is the remaining non-interpolated dimensions of the data array (i.e. 
'data.shape[N:]'), and "points_shape" is the leading dimensions of interp_points, diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 4da4e32ad7..c4a71e97e8 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -482,8 +482,8 @@ def cosine_latitude_weights(cube): w_l = \cos \phi_l - Examples: - + Examples + -------- Compute weights suitable for averaging type operations:: from iris.analysis.cartography import cosine_latitude_weights diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 1042b145de..dc0b2158b6 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -218,7 +218,6 @@ def add(cube, other, dim=None, in_place=False): Parameters ---------- - cube : iris.cube.Cube First operand to add. @@ -234,7 +233,6 @@ def add(cube, other, dim=None, in_place=False): Returns ------- - iris.cube.Cube Notes @@ -273,7 +271,6 @@ def subtract(cube, other, dim=None, in_place=False): Parameters ---------- - cube : iris.cube.Cube Cube from which to subtract. @@ -289,7 +286,6 @@ def subtract(cube, other, dim=None, in_place=False): Returns ------- - iris.cube.Cube Notes @@ -378,7 +374,6 @@ def multiply(cube, other, dim=None, in_place=False): Parameters ---------- - cube : iris.cube.Cube First operand to multiply. @@ -394,7 +389,6 @@ def multiply(cube, other, dim=None, in_place=False): Returns ------- - iris.cube.Cube Notes @@ -464,7 +458,6 @@ def divide(cube, other, dim=None, in_place=False): Parameters ---------- - cube : iris.cube.Cube Numerator. @@ -480,7 +473,6 @@ def divide(cube, other, dim=None, in_place=False): Returns ------- - iris.cube.Cube Notes diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 60124044f1..33214ef5e0 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -999,7 +999,6 @@ def __init__( Parameters ---------- - central_lat : float The latitude of the pole. 
@@ -1113,7 +1112,6 @@ def __init__( Parameters ---------- - central_lat : {90, -90} The latitude of the pole. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 44ca2777bb..3b53eaee9d 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -772,7 +772,6 @@ class CubeAttrsDict(MutableMapping): Examples -------- - >>> from iris.cube import Cube >>> cube = Cube([0]) >>> # CF defines 'history' as global by default. @@ -829,7 +828,6 @@ def __init__( Examples -------- - >>> from iris.cube import CubeAttrsDict >>> # CF defines 'history' as global by default. >>> CubeAttrsDict({'history': 'data-story', 'comment': 'this-cube'}) @@ -2364,11 +2362,11 @@ def mesh(self): :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. - Returns: - - * mesh (:class:`iris.experimental.ugrid.mesh.Mesh` or None): + Returns + ------- + mesh : :class:`iris.experimental.ugrid.mesh.Mesh` or None The mesh of the cube - :class:`~iris.experimental.ugrid.MeshCoord`\\s, + :class:`~iris.experimental.ugrid.MeshCoord`'s, or ``None``. """ @@ -2383,13 +2381,12 @@ def location(self): :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. - Returns: - - * location (str or None): + Returns + ------- + location : str or None The mesh location of the cube - :class:`~iris.experimental.ugrid.MeshCoord`\\s - (i.e. one of 'face' / 'edge' / 'node'), - or ``None``. + :class:`~iris.experimental.ugrid.MeshCoord`'s + (i.e. one of 'face' / 'edge' / 'node'), or ``None``. """ result = self._any_meshcoord() @@ -2402,10 +2399,10 @@ def mesh_dim(self): :class:`~iris.experimental.ugrid.MeshCoord`\\ s, or ``None`` if it has none. - Returns: - - * mesh_dim (int, or None): - the cube dimension which the cube + Returns + ------- + mesh_dim : int or None + The cube dimension which the cube :class:`~iris.experimental.ugrid.MeshCoord`\\s map to, or ``None``. 
@@ -4191,7 +4188,6 @@ def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): Examples -------- - >>> import iris >>> import iris.analysis >>> import iris.coord_categorisation as cat diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 288161deb6..ce379a204e 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -266,10 +266,10 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: * nc_cell_methods: The value of the cell methods attribute to be split. - Returns: - - * nc_cell_methods_matches: A list of the re.Match objects associated with - each parsed cell method + Returns + ------- + nc_cell_methods_matches: list of re.Match objects + A list of re.Match objects associated with each parsed cell method Splitting is done based on words followed by colons outside of any brackets. Validation of anything other than being laid out in the expected format is @@ -337,10 +337,9 @@ def parse_cell_methods(nc_cell_methods): * nc_cell_methods (str): The value of the cell methods attribute to be parsed. - Returns: - - * cell_methods - An iterable of :class:`iris.coords.CellMethod`. + Returns + ------- + iterable of :class:`iris.coords.CellMethod`. Multiple coordinates, intervals and comments are supported. If a method has a non-standard name a warning will be issued, but the diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 0acc03967b..737955b9a7 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -6,9 +6,8 @@ according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. References: - -[CF] NetCDF Climate and Forecast (CF) Metadata conventions. -[NUG] NetCDF User's Guide, https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/ + [CF] NetCDF Climate and Forecast (CF) Metadata conventions. 
+ [NUG] NetCDF User's Guide, https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/ """ diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index f3ed22377e..10da402520 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -317,9 +317,9 @@ def _reshape_vector_args(values_and_dims): Input arrays with associated mapping dimension numbers. The length of each 'dims' must match the ndims of the 'value'. - Returns: - - * reshaped_arrays (iterable of arrays). + Returns + ------- + reshaped_arrays : iterable of arrays The inputs, transposed and reshaped onto common target dimensions. """ @@ -371,9 +371,9 @@ def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): dimension (typically of length 2) when compared to the points array i.e. bounds.shape = points.shape + (nvertex,) - Returns: - - A (points, bounds) tuple. + Returns + ------- + (points, bounds) tuple. """ array = points @@ -634,9 +634,10 @@ def _convert_time_coords( to (). The length of each dims tuple should equal the dimensionality of the corresponding array of values. - Returns: - - A list of (coordinate, dims) tuples. The coordinates are instance of + Returns + ------- + list of (coordinate, dims) tuples. + The coordinates are instance of :class:`iris.coords.DimCoord` if possible, otherwise they are instance of :class:`iris.coords.AuxCoord`. When the coordinate is of length one, the `dims` value is None rather than an empty tuple. 
diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index b43c4a2e50..ce2cba7d5c 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -28,8 +28,9 @@ def _optimal_dimensioning_structure(structure, element_priorities): A dictionary mapping structure element names to their priority as defined by their input order to :func:`~optimal_array_structure`. - Returns: - + Returns + ------- + array structure or an empty list The determined optimal array structure or an empty list if no structure options were determined. @@ -69,7 +70,6 @@ def optimal_array_structure(ordering_elements, actual_values_elements=None): priority when associating dimensions with specific elements. Returns: - dims_shape, primary_elements, element_arrays_and_dims, where: * 'dims_shape' is the shape of the vector dimensions chosen. diff --git a/lib/iris/util.py b/lib/iris/util.py index 458fc3b8b2..ae26b77539 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -51,8 +51,8 @@ def broadcast_to_shape(array, shape, dim_map): to, so the first element of *dim_map* gives the index of *shape* that corresponds to the first dimension of *array* etc. - Examples: - + Examples + -------- Broadcasting an array of shape (2, 3) to the shape (5, 2, 6, 3) where the first dimension of the array corresponds to the second element of the desired shape and the second dimension of the array @@ -303,7 +303,6 @@ def rolling_window(a, window=1, step=1, axis=-1): Axis to take the rolling window over Returns: - Array that is a view of the original array with an added dimension of the size of the given window at axis + 1. @@ -590,15 +589,13 @@ def monotonic(array, strict=False, return_direction=False): or -1 for negative. The direction is meaningless if the array is not monotonic. 
- Returns: - - * monotonic_status (boolean) + Returns + ------- + monotonic_status : bool Whether the array was monotonic. If the return_direction flag was given then the returned value - will be: - - ``(monotonic_status, direction)`` + will be: ``(monotonic_status, direction)`` Notes ------ @@ -1761,16 +1758,18 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): The absolute value tolerance to apply in coordinate bounds checking. - Returns: - - * result (`numpy.ndarray` of bool) : + Returns + ------- + result : `numpy.ndarray` of bool true/false map of which cells in the cube XY grid have discontiguities in the coordinate points array. This can be used as the input array for :func:`iris.util.mask_cube`. - Examples:: + Examples + -------- + :: # Find any unknown discontiguities in your cube's x and y arrays: discontiguities = iris.util.find_discontiguities(cube) @@ -1896,7 +1895,6 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): Parameters ---------- - cube : iris.cube.Cube Cube containing data that requires masking. @@ -1913,13 +1911,11 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): Returns ------- - iris.cube.Cube A cube whose data array is masked at points specified by ``points_to_mask``. Notes ----- - If either ``cube`` or ``points_to_mask`` is lazy, the result will be lazy. This function maintains laziness when called; it does not realise data. @@ -1966,9 +1962,9 @@ def equalise_attributes(cubes): * cubes (iterable of :class:`iris.cube.Cube`): A collection of cubes to compare and adjust. - Returns: - - * removed (list): + Returns + ------- + list A list of dicts holding the removed attributes. Notes From 3383f006782d67da89735314b04efa5413d9b4e8 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Wed, 20 Dec 2023 13:17:48 +0000 Subject: [PATCH 132/134] ruff rule D211, D300, D403 compliance. (#5661) * git compliane for D300. * ruff compliance. * fix doctests. 
--- .ruff.toml | 3 --- benchmarks/benchmarks/aux_factory.py | 1 - benchmarks/benchmarks/cube.py | 1 - lib/iris/analysis/_scipy_interpolate.py | 1 - lib/iris/experimental/ugrid/mesh.py | 2 +- lib/iris/tests/test_analysis_calculus.py | 2 +- lib/iris/tests/test_coding_standards.py | 4 ++-- lib/iris/tests/test_coordsystem.py | 2 +- .../interpolation/test_RectilinearInterpolator.py | 4 +++- .../tests/unit/common/lenient/test__lenient_client.py | 8 ++++---- .../tests/unit/common/lenient/test__lenient_service.py | 8 ++++---- lib/iris/tests/unit/cube/test_Cube.py | 2 +- 12 files changed, 17 insertions(+), 21 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index f9e3770410..03fe7d4863 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -38,10 +38,7 @@ lint.ignore = [ "D202", # No blank lines allowed after function docstring "D205", # 1 blank line required between summary line and description "D209", # Multi-line docstring closing quotes should be on a separate line - "D211", # No blank lines allowed before class docstring - "D300", # triple double quotes `""" / Use triple single quotes `'''` "D401", # First line of docstring should be in imperative mood: ... - "D403", # First word of the first line should be capitalized "D404", # First word of the docstring should not be "This" "D405", # Section name should be properly capitalized "D406", # Section name should end with a newline diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index aed50c2854..c9881cf60e 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -18,7 +18,6 @@ class FactoryCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for any factory. Factory to be specified in a subclass. 
diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index 8fcea35499..67abe2577c 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -30,7 +30,6 @@ class ComponentCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for cubes that include a specified component (e.g. Coord, CellMeasure etc.). Component to be specified in a subclass. diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index bf1796f91e..251fb4bf70 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -40,7 +40,6 @@ def _ndim_coords_from_arrays(points, ndim=None): # source: https://github.com/scipy/scipy/blob/b94a5d5ccc08dddbc88453477ff2625\ # 9aeaafb32/scipy/interpolate/interpolate.py#L1400 class _RegularGridInterpolator: - """Interpolation on a regular grid in arbitrary dimensions. The data must be defined on a regular grid; the grid spacing however may be diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 98fd08d643..14bb313474 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -2140,7 +2140,7 @@ def _add(self, coords): setattr(self, member_y, coords[1]) def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): - """use self.remove(edge_x=True) to remove a coordinate e.g., using the + """Use self.remove(edge_x=True) to remove a coordinate e.g., using the pattern self.add(edge_x=None) will not remove the edge_x coordinate. 
""" diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 57c2414875..70c1077def 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -503,7 +503,7 @@ def build_cube(data, spherical=False): class TestCalculusWKnownSolutions(tests.IrisTest): def get_coord_pts(self, cube): - """return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones) for the given cube.""" + """Return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones) for the given cube.""" x = cube.coord(axis="X") y = cube.coord(axis="Y") z = cube.coord(axis="Z") diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 78d6bd67e1..44bd2dc868 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -136,9 +136,9 @@ def test_categorised_warnings(): .. code-block:: python class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): - \""" + \"\"\" One-off combination of warning classes - enhances user filtering. - \""" + \"\"\" pass """ diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index ea839cff00..69aeeaa1b1 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -250,7 +250,7 @@ def test_update_to_equivalent(self): class Test_GeogCS_mutation(tests.IrisTest): - "Test that altering attributes of a GeogCS instance behaves as expected." 
+ """Test that altering attributes of a GeogCS instance behaves as expected.""" def test_semi_major_axis_change(self): # Clear datum diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index e02855513e..b37fa1de62 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -329,7 +329,9 @@ def test_fully_wrapped_not_circular(self): class Test___call___1D_singlelendim(ThreeDimCube): def setUp(self): - """thingness / (1) (wibble: 2; latitude: 1) + """Setup. + + thingness / (1) (wibble: 2; latitude: 1) Dimension coordinates: wibble x - latitude - x diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py index a0c630bc21..54e2aca185 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -76,9 +76,9 @@ def myclient(*args, **kwargs): def test_call_naked_doc(self): @_lenient_client def myclient(): - """myclient doc-string.""" + """Myclient doc-string.""" - self.assertEqual(myclient.__doc__, "myclient doc-string.") + self.assertEqual(myclient.__doc__, "Myclient doc-string.") def test_call_no_kwargs(self): @_lenient_client() @@ -169,9 +169,9 @@ def myclient(*args, **kwargs): def test_call_doc(self): @_lenient_client() def myclient(): - """myclient doc-string.""" + """Myclient doc-string.""" - self.assertEqual(myclient.__doc__, "myclient doc-string.") + self.assertEqual(myclient.__doc__, "Myclient doc-string.") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py index 6e53aeb952..a916779c79 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_service.py +++ 
b/lib/iris/tests/unit/common/lenient/test__lenient_service.py @@ -67,9 +67,9 @@ def myservice(*args, **kwargs): def test_call_naked_doc(self): @_lenient_service def myservice(): - """myservice doc-string.""" + """Myservice doc-string.""" - self.assertEqual(myservice.__doc__, "myservice doc-string.") + self.assertEqual(myservice.__doc__, "Myservice doc-string.") def test_call(self): @_lenient_service() @@ -105,9 +105,9 @@ def myservice(*args, **kwargs): def test_call_doc(self): @_lenient_service() def myservice(): - """myservice doc-string.""" + """Myservice doc-string.""" - self.assertEqual(myservice.__doc__, "myservice doc-string.") + self.assertEqual(myservice.__doc__, "Myservice doc-string.") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index e883140121..27f1756770 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -961,7 +961,7 @@ class Test_slices_dim_order(tests.IrisTest): """ def setUp(self): - """setup a 4D iris cube, each dimension is length 1. + """Setup a 4D iris cube, each dimension is length 1. The dimensions are; dim1: time dim2: height From a3410ceb7d5294361e3c4db18ca2c77961eab450 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:11:19 +0000 Subject: [PATCH 133/134] ruff compliance for D409 and D410. (#5662) * ruff compliance for D409 and D410. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix doctest. * fix line * fix doctest. 
* fix merge --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .ruff.toml | 2 -- lib/iris/analysis/cartography.py | 8 ++--- lib/iris/analysis/maths.py | 22 +++++++------- lib/iris/analysis/trajectory.py | 2 +- lib/iris/common/resolve.py | 6 ++-- lib/iris/cube.py | 3 +- lib/iris/fileformats/dot.py | 3 +- lib/iris/iterate.py | 2 +- lib/iris/palette.py | 2 +- lib/iris/pandas.py | 6 ++-- lib/iris/plot.py | 36 +++++++++++------------ lib/iris/quickplot.py | 20 ++++++------- lib/iris/util.py | 50 ++++++++++++++++---------------- 13 files changed, 80 insertions(+), 82 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 03fe7d4863..33d5879122 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -43,8 +43,6 @@ lint.ignore = [ "D405", # Section name should be properly capitalized "D406", # Section name should end with a newline "D407", # Missing dashed underline after section - "D409", # Section underline should match the length of its name - "D410", # Missing blank line after section # pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index c4a71e97e8..c0613028e3 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -65,7 +65,7 @@ def wrap_lons(lons, base, period): [-175. 30. 160. 75.] Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -258,7 +258,7 @@ def get_xy_grids(cube): x, y = get_xy_grids(cube) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -289,7 +289,7 @@ def get_xy_contiguous_bounded_grids(cube): xs, ys = get_xy_contiguous_bounded_grids(cube) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. 
@@ -499,7 +499,7 @@ def cosine_latitude_weights(cube): weights = np.sqrt(cosine_latitude_weights(cube)) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index dc0b2158b6..3d1df8d66f 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -107,7 +107,7 @@ def abs(cube, in_place=False): An instance of :class:`iris.cube.Cube`. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -154,7 +154,7 @@ def intersection_of_cubes(cube, other_cube): cube1, cube2 = (intersections[0], intersections[1]) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -236,7 +236,7 @@ def add(cube, other, dim=None, in_place=False): iris.cube.Cube Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -289,7 +289,7 @@ def subtract(cube, other, dim=None, in_place=False): iris.cube.Cube Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -392,7 +392,7 @@ def multiply(cube, other, dim=None, in_place=False): iris.cube.Cube Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -476,7 +476,7 @@ def divide(cube, other, dim=None, in_place=False): iris.cube.Cube Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -542,7 +542,7 @@ def exponentiate(cube, exponent, in_place=False): An instance of :class:`iris.cube.Cube`. 
Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -593,7 +593,7 @@ def exp(cube, in_place=False): An instance of :class:`iris.cube.Cube`. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -623,7 +623,7 @@ def log(cube, in_place=False): An instance of :class:`iris.cube.Cube`. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -657,7 +657,7 @@ def log2(cube, in_place=False): An instance of :class:`iris.cube.Cube`. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -687,7 +687,7 @@ def log10(cube, in_place=False): An instance of :class:`iris.cube.Cube`. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 53dcc0ceac..d8a3202fbb 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -207,7 +207,7 @@ def interpolate(cube, sample_points, method=None): interpolated_cube = interpolate(cube, sample_points) Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. 
""" diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index ec73792915..84ea6eed24 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -205,7 +205,7 @@ class Resolve: >>> resolver = Resolve(cube1, cube2) >>> results = [resolver.cube(data) for data in payload] - """ # noqa: D214, D411 + """ # noqa: D214, D410, D411 def __init__(self, lhs=None, rhs=None): """Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and @@ -2493,7 +2493,7 @@ def mapped(self): >>> resolver.map_rhs_to_lhs False - """ # noqa: D214, D411 + """ # noqa: D214, D410, D411 result = None if self.mapping is not None: result = self._src_cube.ndim == len(self.mapping) @@ -2554,5 +2554,5 @@ def shape(self): >>> Resolve(cube2, cube1).shape (240, 37, 49) - """ # noqa: D214, D411 + """ # noqa: D214, D410, D411 return self._broadcast_shape diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 3b53eaee9d..1199831b7b 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -4517,7 +4517,8 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Notice that the forecast_period dimension now represents the 4 possible windows of size 3 from the original cube. - """ # noqa: D214, D411 + """ # noqa: D214, D410, D411 + # Update weights kwargs (if necessary) to handle different types of # weights weights_info = None diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index ff0e26bf84..3c24145073 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -345,8 +345,7 @@ def _dot_node(indent, id, name, attributes): The visual name of the node. attributes: An iterable of (name, value) attribute pairs. 
- - """ # noqa: D411 + """ # noqa: D410, D411 attributes = r"\n".join("%s: %s" % item for item in attributes) template = """%(indent)s"%(id)s" [ %(indent)s label = "%(name)s|%(attributes)s" diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index c4565d05c9..41b3929464 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -58,7 +58,7 @@ def izip(*cubes, **kwargs): ... pass Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ diff --git a/lib/iris/palette.py b/lib/iris/palette.py index acea46432a..3180f1e02a 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -117,7 +117,7 @@ def cmap_norm(cube): :class:`iris.palette.SymmetricNormalize` Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index ed33990f5c..dd7d0d31fc 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -217,7 +217,7 @@ def as_cubes( :class:`~iris.coords.AncillaryVariable` objects. Returns - -------- + ------- :class:`~iris.cube.CubeList` One :class:`~iris.cube.Cube` for each column not referenced in `aux_coord_cols`/`cell_measure_cols`/`ancillary_variable_cols`. @@ -584,7 +584,7 @@ def as_series(cube, copy=True): make sure it is not masked and use copy=False. Notes - ------ + ----- Since this function converts to/from a Pandas object, laziness will not be preserved. """ @@ -797,7 +797,7 @@ def as_data_frame( Name: surface_temperature, Length: 419904, dtype: float32 Notes - ------ + ----- Since this function converts to/from a Pandas object, laziness will not be preserved. """ diff --git a/lib/iris/plot.py b/lib/iris/plot.py index b4e20c57af..06dfe79aa9 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -1057,7 +1057,7 @@ def contour(cube, *args, **kwargs): keyword arguments. 
Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1085,7 +1085,7 @@ def contourf(cube, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1159,7 +1159,7 @@ def default_projection(cube): ax = plt.ax(projection=default_projection(cube)) Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1181,7 +1181,7 @@ def default_projection_extent(cube, mode=iris.coords.POINT_MODE): The default is iris.coords.POINT_MODE. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1223,7 +1223,7 @@ def orography_at_bounds(cube, facecolor="#888888", coords=None, axes=None): """Plots orography defined at cell boundaries from the given Cube. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1262,7 +1262,7 @@ def orography_at_points(cube, facecolor="#888888", coords=None, axes=None): """Plots orography defined at sample points from the given Cube. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1311,7 +1311,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): provided. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1357,7 +1357,7 @@ def pcolor(cube, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. 
@@ -1395,7 +1395,7 @@ def pcolormesh(cube, *args, **kwargs): valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1424,7 +1424,7 @@ def points(cube, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1516,7 +1516,7 @@ def barbs(u_cube, v_cube, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1570,7 +1570,7 @@ def quiver(u_cube, v_cube, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1620,7 +1620,7 @@ def plot(*args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1656,7 +1656,7 @@ def scatter(x, y, *args, **kwargs): valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1695,7 +1695,7 @@ def fill_between(x, y1, y2, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1729,7 +1729,7 @@ def hist(x, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1778,7 +1778,7 @@ def symbols(x, y, symbols, size, axes=None, units="inches"): The unit for the symbol size. 
Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1906,7 +1906,7 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): >>> iplt.show() Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 8ed76866aa..fcb0b0d5b7 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -154,7 +154,7 @@ def contour(cube, *args, **kwargs): See :func:`iris.plot.contour` for details of valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -185,7 +185,7 @@ def contourf(cube, *args, **kwargs): See :func:`iris.plot.contourf` for details of valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -216,7 +216,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): width in patch.linewidth in matplotlibrc is used. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -235,7 +235,7 @@ def pcolor(cube, *args, **kwargs): See :func:`iris.plot.pcolor` for details of valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -252,7 +252,7 @@ def pcolormesh(cube, *args, **kwargs): See :func:`iris.plot.pcolormesh` for details of valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. 
@@ -270,7 +270,7 @@ def points(cube, *args, **kwargs): See :func:`iris.plot.points` for details of valid keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -290,7 +290,7 @@ def plot(*args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -309,7 +309,7 @@ def scatter(x, y, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -327,7 +327,7 @@ def fill_between(x, y1, y2, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -344,7 +344,7 @@ def hist(x, *args, **kwargs): keyword arguments. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ diff --git a/lib/iris/util.py b/lib/iris/util.py index ae26b77539..59a171fa04 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -67,7 +67,7 @@ def broadcast_to_shape(array, shape, dim_map): result = broadcast_to_shape(a, (96, 48, 12), (1, 0)) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -138,7 +138,7 @@ def delta(ndarray, dimension, circular=False): array([90, 90, 90, 90]) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -192,7 +192,7 @@ def describe_diff(cube_a, cube_b, output_file=None): scope of this function. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. 
See more at :doc:`/userguide/real_and_lazy_data`. @@ -248,7 +248,7 @@ def guess_coord_axis(coord): 'T', 'Z', 'Y', 'X', or None. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -320,7 +320,7 @@ def rolling_window(a, window=1, step=1, axis=-1): [ 6., 7., 8.]]) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -376,7 +376,7 @@ def array_equal(array1, array2, withnans=False): with additional support for arrays of strings and NaN-tolerant operation. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -411,7 +411,7 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): will return False. Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -464,7 +464,7 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): print(i, between_3_and_6(i)) Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -522,7 +522,7 @@ def reverse(cube_or_array, coords_or_dims): [15 14 13 12]]] Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -598,7 +598,7 @@ def monotonic(array, strict=False, return_direction=False): will be: ``(monotonic_status, direction)`` Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -653,7 +653,7 @@ def column_slices_generator(full_slice, ndims): approach of [(3, 5), : , (1, 6, 8)] for column based indexing. 
Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1028,7 +1028,7 @@ def clip_string(the_str, clip_length=70, rider="..."): original string is returned unaltered. Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1061,7 +1061,7 @@ def format_array(arr): For customisations, use the :mod:`numpy.core.arrayprint` directly. Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1113,7 +1113,7 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy (1, 360, 360) Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1225,7 +1225,7 @@ def squeeze(cube): (360, 360) Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1298,7 +1298,7 @@ def is_regular(coord): """Determine if the given coord is regular. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1315,7 +1315,7 @@ def regular_step(coord): """Return the regular step from a coord or fail. Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1351,7 +1351,7 @@ def regular_points(zeroth, step, count): The number of point values. Notes - ------ + ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1377,7 +1377,7 @@ def points_step(points): Notes - ------ + ----- This function does not maintain laziness when called; it realises data. 
See more at :doc:`/userguide/real_and_lazy_data`. """ @@ -1410,7 +1410,7 @@ def unify_time_units(cubes): An iterable containing :class:`iris.cube.Cube` instances. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1556,7 +1556,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): time x - - Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1685,7 +1685,7 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): year x - - Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1783,7 +1783,7 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): iplt.pcolormesh(masked_cube_slice) Notes - ------ + ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1968,7 +1968,7 @@ def equalise_attributes(cubes): A list of dicts holding the removed attributes. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -2040,7 +2040,7 @@ def is_masked(array): Whether or not the array has any masks. Notes - ------ + ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. From 9d838114153865389dfe1e8d2b515f65566877ba Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:55:14 +0000 Subject: [PATCH 134/134] ruff compliance for D209. (#5663) * ruff compliance for D209. 
* [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .ruff.toml | 1 - benchmarks/benchmarks/aux_factory.py | 6 ++++-- benchmarks/benchmarks/coords.py | 6 ++++-- benchmarks/benchmarks/cube.py | 7 +++++-- benchmarks/benchmarks/experimental/ugrid/__init__.py | 6 ++++-- lib/iris/analysis/trajectory.py | 6 ++++-- lib/iris/fileformats/_nc_load_rules/helpers.py | 10 +++++++--- lib/iris/tests/integration/netcdf/test_general.py | 6 ++++-- lib/iris/tests/test_plot.py | 6 ++++-- .../test__RegularGridInterpolator.py | 3 ++- lib/iris/tests/unit/coords/test_Cell.py | 9 ++++++--- .../plot/test__check_bounds_contiguity_and_mask.py | 3 ++- .../test__check_geostationary_coords_and_convert.py | 3 ++- .../plot/test__get_plot_defn_custom_coords_picked.py | 3 ++- noxfile.py | 12 ++++++++---- 15 files changed, 58 insertions(+), 29 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 33d5879122..9451023469 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -37,7 +37,6 @@ lint.ignore = [ "D200", # One-line docstring should fit on one line "D202", # No blank lines allowed after function docstring "D205", # 1 blank line required between summary line and description - "D209", # Multi-line docstring closing quotes should be on a separate line "D401", # First line of docstring should be in imperative mood: ... "D404", # First word of the docstring should not be "This" "D405", # Section name should be properly capitalized diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index c9881cf60e..6f71e47086 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -36,8 +36,10 @@ def setup_common(self): self.factory = self.create() def time_create(self): - """Create an instance of the benchmarked factory. 
create method is - specified in the subclass.""" + """Create an instance of the benchmarked factory. + + Create method is specified in the subclass. + """ self.create() diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index 930cddb4f1..f90ed1fd31 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -43,8 +43,10 @@ def setup_common(self): self.component = self.create() def time_create(self): - """Create an instance of the benchmarked coord. create method is - specified in the subclass.""" + """Create an instance of the benchmarked factory. + + Create method is specified in the subclass. + """ self.create() diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index 67abe2577c..ef42e03077 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -45,8 +45,11 @@ def setup(self): raise NotImplementedError def create(self): - """Generic cube creation. cube_kwargs allow dynamic inclusion of - different components; specified in subclasses.""" + """Generic cube creation. + + cube_kwargs allow dynamic inclusion of different components; + specified in subclasses. + """ return cube.Cube(data=data_2d, **self.cube_kwargs) def setup_common(self): diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index add9b0d37c..322fe9acc0 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -40,8 +40,10 @@ def create(self): raise NotImplementedError def time_create(self, *params): - """Create an instance of the benchmarked object. create() method is - specified in the subclass.""" + """Create an instance of the benchmarked object. + + create() method is specified in the subclass. 
+ """ self.create() diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index d8a3202fbb..ed5b911b0c 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -16,8 +16,10 @@ class _Segment: - """A single trajectory line segment: Two points, as described in the - Trajectory class.""" + """A single trajectory line segment. + + Two points, as described in the Trajectory class. + """ def __init__(self, p0, p1): # check keys diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index ce379a204e..31cc4aaa6c 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1542,8 +1542,11 @@ def is_rotated_longitude(engine, cf_name): ################################################################################ def has_supported_mercator_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has the supported - values for the parameters of the Mercator projection.""" + """Determine whether the CF grid mapping variable has the supported values. + + Determine whether the CF grid mapping variable has the supported + values for the parameters of the Mercator projection. + """ is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] @@ -1567,7 +1570,8 @@ def has_supported_mercator_parameters(engine, cf_name): ################################################################################ def has_supported_polar_stereographic_parameters(engine, cf_name): """Determine whether the CF grid mapping variable has the supported - values for the parameters of the Polar Stereographic projection.""" + values for the parameters of the Polar Stereographic projection. 
+ """ is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index 673b988557..8c27742185 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -225,8 +225,10 @@ def test_single_packed_unsigned(self): self._single_test("u1", "single_packed_unsigned.cdl") def test_single_packed_manual_scale(self): - """Test saving a single CF-netCDF file with packing with scale - factor and add_offset set manually.""" + """Test saving a single CF-netCDF file. + + File with packing with scale factor and add_offset set manually. + """ self._single_test("i2", "single_packed_manual.cdl", manual=True) def _multi_test(self, CDLfilename, multi_dtype=False): diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 35133a39d5..64f128c408 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -543,7 +543,8 @@ class SliceMixin: """Mixin class providing tests for each 2-dimensional permutation of axes. Requires self.draw_method to be the relevant plotting function, - and self.results to be a dictionary containing the desired test results.""" + and self.results to be a dictionary containing the desired test results. + """ def test_yx(self): cube = self.wind[0, 0, :, :] @@ -727,7 +728,8 @@ class Slice1dMixin: """Mixin class providing tests for each 1-dimensional permutation of axes. Requires self.draw_method to be the relevant plotting function, - and self.results to be a dictionary containing the desired test results.""" + and self.results to be a dictionary containing the desired test results. 
+ """ def test_x(self): cube = self.wind[0, 0, 0, :] diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py index 5ef677ad4a..374e355c32 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py @@ -3,7 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for the -:func:`iris.analysis._scipy_interpolate._RegularGridInterpolator` class.""" +:func:`iris.analysis._scipy_interpolate._RegularGridInterpolator` class. +""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index 0011f6349d..18d34b7d39 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -140,8 +140,10 @@ def test_PartialDateTime_other(self): class Test_contains_point(tests.IrisTest): - """Test that contains_point works for combinations of datetime, - cf.datatime, and PartialDateTime objects.""" + """Test that contains_point works for combinations. + + Combinations of datetime, cf.datatime, and PartialDateTime objects. + """ def test_datetime_PartialDateTime_point(self): point = PartialDateTime(month=6) @@ -236,7 +238,8 @@ def test_cftime_360_day_cftime_360day_point(self): class Test_numpy_comparison(tests.IrisTest): """Unit tests to check that the results of comparisons with numpy types can be - used as truth values.""" + used as truth values. 
+ """ def test_cell_lhs(self): cell = Cell(point=1.5) diff --git a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py index b336c429bc..9ec80cbd50 100644 --- a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py +++ b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py @@ -3,7 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._check_bounds_contiguity_and_mask` -function.""" +function. +""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py index 1ff7195afb..bf724c443c 100644 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -3,7 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._check_geostationary_coords_and_convert -function.""" +function. +""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py index fb6de798e8..7b39043559 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py @@ -3,7 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.plot._get_plot_defn_custom_coords_picked` -function.""" +function. +""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/noxfile.py b/noxfile.py index 4bdde52d41..4d3bb85f98 100644 --- a/noxfile.py +++ b/noxfile.py @@ -47,14 +47,18 @@ def session_cachefile(session: nox.sessions.Session) -> Path: def venv_populated(session: nox.sessions.Session) -> bool: - """Returns True if the conda venv has been created - and the list of packages in the lockfile installed.""" + """List of packages in the lockfile installed. + + Returns True if the conda venv has been created. + """ return session_cachefile(session).is_file() def venv_changed(session: nox.sessions.Session) -> bool: - """Returns True if the installed session is different to that specified - in the lockfile.""" + """Returns True if the installed session is different. + + Compares to that specified in the lockfile. + """ changed = False cache = session_cachefile(session) lockfile = session_lockfile(session)