diff --git a/.flake8 b/.flake8 index 257b9b3d62..131b6eb1ff 100644 --- a/.flake8 +++ b/.flake8 @@ -15,6 +15,8 @@ ignore = E402, # E501: line too long E501, + # E731: do not assign a lambda expression, use a def + E731, # W503: line break before binary operator W503, # W504: line break after binary operator @@ -26,8 +28,8 @@ exclude = .eggs, build, compiled_krb, - sphinxext, - tools, + docs/iris/src/sphinxext/*, + tools/*, # # ignore auto-generated files # @@ -38,3 +40,7 @@ exclude = # ignore third-party files # gitwash_dumper.py, + # + # convenience imports + # + lib/iris/common/__init__.py diff --git a/.gitignore b/.gitignore index 48cddc53be..d589c306fe 100644 --- a/.gitignore +++ b/.gitignore @@ -55,11 +55,8 @@ lib/iris/tests/results/imagerepo.lock *.cover # Auto generated documentation files -docs/iris/src/_static/random_image.js -docs/iris/src/_templates/gallery.html -docs/iris/src/examples/ -docs/iris/src/iris/ -docs/iris/src/matplotlibrc +docs/iris/src/_build/* +docs/iris/src/generated # Example test results docs/iris/iris_image_test_output/ diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000000..55c24eaddc --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,20 @@ +version: 2 + +build: + image: latest + +conda: + environment: ci/requirements/readthedocs.yml + +sphinx: + configuration: docs/iris/src/conf.py + fail_on_warning: false + +python: + install: + - method: setuptools + path: . + +formats: + - htmlzip + - pdf diff --git a/.travis.yml b/.travis.yml index 312914d634..5015ac153e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,12 +15,15 @@ env: matrix: - PYTHON_VERSION=3.6 TEST_TARGET=default TEST_MINIMAL=true - PYTHON_VERSION=3.6 TEST_TARGET=default TEST_BLACK=true - - PYTHON_VERSION=3.6 TEST_TARGET=example - + - PYTHON_VERSION=3.6 TEST_TARGET=gallery - PYTHON_VERSION=3.7 TEST_TARGET=default TEST_MINIMAL=true - PYTHON_VERSION=3.7 TEST_TARGET=default TEST_BLACK=true - - PYTHON_VERSION=3.7 TEST_TARGET=example + - PYTHON_VERSION=3.7 TEST_TARGET=gallery - PYTHON_VERSION=3.7 TEST_TARGET=doctest PUSH_BUILT_DOCS=true + - PYTHON_VERSION=3.7 TEST_TARGET=linkcheck + # TODO: Dependencies for sphinxcontrib-spelling to be in place before this + # spelling code block is enabled + #- PYTHON_VERSION=3.7 TEST_TARGET=spelling git: # We need a deep clone so that we can compute the age of the files using their git history. @@ -61,8 +64,8 @@ install: if [[ "${TEST_MINIMAL}" != true ]]; then CONDA_REQS_GROUPS="${CONDA_REQS_GROUPS} all"; fi; - if [[ "${TEST_TARGET}" == 'doctest' ]]; then - CONDA_REQS_GROUPS="${CONDA_REQS_GROUPS} docs"; + if [[ "${TEST_TARGET}" == 'doctest' || "${TEST_TARGET}" == 'linkcheck' ]]; then + CONDA_REQS_GROUPS="${CONDA_REQS_GROUPS} docs"; fi; CONDA_REQS_FILE="conda-requirements.txt"; python requirements/gen_conda_requirements.py --groups ${CONDA_REQS_GROUPS} > ${CONDA_REQS_FILE}; @@ -100,15 +103,6 @@ install: - python setup.py --quiet install - # TODO : remove when iris doesn't do an integration test requiring iris-grib. - # test against the latest version of python-eccodes. - # Conda-forge versioning is out of order (0.9.* is later than 2.12.*). - - > - if [[ "${TEST_MINIMAL}" != true ]]; then - conda install --quiet -n ${ENV_NAME} python-eccodes">=0.9.1, <2"; - conda install --quiet -n ${ENV_NAME} --no-deps iris-grib; - fi - script: # Capture install-dir: As a test command must be last for get Travis to check # the RC, so it's best to start each operation with an absolute cd. 
@@ -127,27 +121,9 @@ script: python -m iris.tests.runner --default-tests --system-tests; fi - - if [[ "${TEST_TARGET}" == 'example' ]]; then - python -m iris.tests.runner --example-tests; - fi - - # A call to check "whatsnew" contributions are valid, because the Iris test - # for it needs a *developer* install to be able to find the docs. - > - if [[ "${TEST_TARGET}" == 'doctest' ]]; then - cd ${INSTALL_DIR}/docs/iris/src/whatsnew; - python aggregate_directory.py --checkonly; - fi - - # When pushing built docs, attempt to make a preliminary whatsnew by calling - # 'aggregate_directory.py', before the build. - - > - if [[ "${PUSH_BUILT_DOCS}" == 'true' ]]; then - cd ${INSTALL_DIR}/docs/iris/src/whatsnew; - WHATSNEW=$(ls -d contributions_* 2>/dev/null); - if [[ -n "${WHATSNEW}" ]]; then - python aggregate_directory.py --unreleased; - fi; + if [[ "${TEST_TARGET}" == 'gallery' ]]; then + python -m iris.tests.runner --gallery-tests; fi # Build the docs. @@ -161,6 +137,31 @@ script: make clean html && make doctest; fi + # check the links in the docs + - > + if [[ "${TEST_TARGET}" == 'linkcheck' ]]; then + MPL_RC_DIR="${HOME}/.config/matplotlib"; + mkdir -p ${MPL_RC_DIR}; + echo 'backend : agg' > ${MPL_RC_DIR}/matplotlibrc; + echo 'image.cmap : viridis' >> ${MPL_RC_DIR}/matplotlibrc; + cd ${INSTALL_DIR}/docs/iris; + make clean && make linkcheck; + fi + + # TODO: Dependencies for sphinxcontrib-spelling to be in place before this + # spelling code block is enabled + + # check the spelling in the docs + # - > + # if [[ "${TEST_TARGET}" == 'spelling' ]]; then + # MPL_RC_DIR="${HOME}/.config/matplotlib"; + # mkdir -p ${MPL_RC_DIR}; + # echo 'backend : agg' > ${MPL_RC_DIR}/matplotlibrc; + # echo 'image.cmap : viridis' >> ${MPL_RC_DIR}/matplotlibrc; + # cd ${INSTALL_DIR}/docs/iris; + # make clean && make spelling; + # fi + # Split the organisation out of the slug. See https://stackoverflow.com/a/5257398/741316 for description. # NOTE: a *separate* "export" command appears to be necessary here : A command of the # form "export ORG=.." failed to define ORG for the following command (?!) @@ -176,7 +177,7 @@ script: - if [[ "${ORG}" == 'SciTools' && "${TRAVIS_EVENT_TYPE}" == 'push' && "${PUSH_BUILT_DOCS}" == 'true' ]]; then cd ${INSTALL_DIR}; pip install doctr; - doctr deploy --deploy-repo SciTools-docs/iris --built-docs docs/iris/build/html + doctr deploy --deploy-repo SciTools-docs/iris --built-docs docs/iris/src/_build/html --key-path .github/deploy_key.scitools-docs.enc --no-require-master ${TRAVIS_BRANCH:-${TRAVIS_TAG}}; diff --git a/INSTALL b/INSTALL index 9296f97a29..cf4c4d1bae 100644 --- a/INSTALL +++ b/INSTALL @@ -1,9 +1,11 @@ You can either install Iris using the conda package manager or from source. + Installing using conda ---------------------- Iris is available using conda for the following platforms: + * Linux 64-bit, * Mac OSX 64-bit, and * Windows 32-bit and 64-bit. @@ -16,8 +18,7 @@ the following command:: conda install -c conda-forge iris -If you wish to run any of the code examples -(see http://scitools.org.uk/iris/docs/latest/examples/index.html) you will also +If you wish to run any of the code in the gallery you will also need the Iris sample data. This can also be installed using conda:: conda install -c conda-forge iris-sample-data @@ -77,7 +78,7 @@ Hence the commands change to:: conda activate my_iris_env # or whatever other name you gave it pip install -e . 
-The tests can then be run with +The tests can then be run with:: python setup.py test diff --git a/README.md b/README.md index ee7a170822..469db4619a 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@

-[HTML image markup stripped: old centred "Iris" logo banner]
+[HTML image markup stripped: new centred "Iris" logo banner]
Iris is a powerful, format-agnostic, community-driven Python library for analysing and visualising Earth science data @@ -14,6 +14,9 @@ Travis-CI + + Documentation Status conda-forge downloads @@ -29,9 +32,6 @@ Commits since last release - -Latest docs zenodo @@ -120,4 +120,4 @@ of its [GNU LGPLv3 license](COPYING.LESSER). # Contributing Information on how to contribute can be found in the [Iris developer guide](https://scitools.org.uk/iris/docs/latest/developers_guide/index.html). -(C) British Crown Copyright 2010 - 2019, Met Office +(C) British Crown Copyright 2010 - 2020, Met Office diff --git a/ci/requirements/readthedocs.yml b/ci/requirements/readthedocs.yml new file mode 100644 index 0000000000..4a1df9cc7b --- /dev/null +++ b/ci/requirements/readthedocs.yml @@ -0,0 +1,61 @@ +name: iris-docs + +channels: + - conda-forge + +dependencies: +# Dependencies necessary to run setup.py of iris +# ---------------------------------------------- + - setuptools + - pyke + +# Absolute minimal dependencies for iris +# -------------------------------------- + +# Without these, iris won't even import. + + - cartopy>=0.12 + - proj4<6 + - cf-units>=2 + - cftime + - dask>=2 + - matplotlib<3.3 + - netcdf4 + - numpy>=1.14 + - scipy + - python-xxhash + +# Dependencies needed to run the iris tests +#------------------------------------------ + + - black=19.10b0 + - filelock + - pillow<7 + - imagehash>=4.0 + - nose + - pre-commit + - requests + - asv + +# Dependencies for a feature complete installation +# ------------------------------------------------ + +# esmpy regridding not available through pip. + - esmpy>=7.0 +#gdal : under review -- not tested at present + - mo_pack + - nc-time-axis + - pandas + - python-stratify + - pyugrid + + - graphviz + +# Iris sample data is not available through pip. It can be installed from +# https://github.com/SciTools/iris-sample-data/archive/master.zip + - iris-sample-data + - sphinx + - sphinx_rtd_theme + - sphinx-copybutton + - sphinx-gallery + diff --git a/docs/iris/Makefile b/docs/iris/Makefile index 1a66b03805..e9632ddb9f 100644 --- a/docs/iris/Makefile +++ b/docs/iris/Makefile @@ -5,43 +5,48 @@ html: echo "make html in $$i..."; \ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html); done -pdf: - @for i in $(SUBDIRS); do\ - echo "make latex in $$i.."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) latex); done - echo "\def\sphinxdocclass{MO_report}" > build/latex/docs.tex - echo "\documentclass[letterpaper,10pt,english]{MO_report}" >> build/latex/docs.tex - tail -n +4 build/latex/Iris.tex >> build/latex/docs.tex - sed 's/\\tableofcontents/\\tableofcontents\n\\pagenumbering\{arabic\}/' build/latex/docs.tex > build/latex/docs2.tex - sed 's/subsection{/section{/' build/latex/docs2.tex > build/latex/documentation.tex - (cd build/latex; pdflatex -interaction=scrollmode documentation.tex) - # call latex again to get page numbers right... 
- (cd build/latex; pdflatex -interaction=scrollmode documentation.tex); +html-noplot: + @for i in $(SUBDIRS); do \ + echo "make html-noplot in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-noplot); done + +spelling: + @for i in $(SUBDIRS); do \ + echo "make spelling in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) spelling); done all: @for i in $(SUBDIRS); do \ - echo "make all in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) all); done + echo "make all in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) all); done + install: @for i in $(SUBDIRS); do \ - echo "Installing in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) install); done + echo "Installing in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) install); done + build: @for i in $(SUBDIRS); do \ - echo "Clearing in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) build); done + echo "Clearing in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) build); done + clean: @for i in $(SUBDIRS); do \ - echo "Clearing in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) clean); done + echo "Clearing in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) clean); done doctest: @for i in $(SUBDIRS); do \ echo "Running doctest in $$i..."; \ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) doctest); done -extest: +linkcheck: + @for i in $(SUBDIRS); do \ + echo "Running linkcheck in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) linkcheck); done + +gallerytest: @echo - @echo "Running \"example_code/graphics\" tests..." + @echo "Running \"gallery\" tests..." @echo python -m unittest discover -v -t . diff --git a/docs/iris/example_code/General/__init__.py b/docs/iris/example_code/General/__init__.py deleted file mode 100644 index f67741cf37..0000000000 --- a/docs/iris/example_code/General/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -General visualisation examples -============================== -""" diff --git a/docs/iris/example_code/Meteorology/__init__.py b/docs/iris/example_code/Meteorology/__init__.py deleted file mode 100644 index 39c05d08c6..0000000000 --- a/docs/iris/example_code/Meteorology/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -Meteorology visualisation examples -================================== -""" diff --git a/docs/iris/example_code/Oceanography/__init__.py b/docs/iris/example_code/Oceanography/__init__.py deleted file mode 100644 index afac828a05..0000000000 --- a/docs/iris/example_code/Oceanography/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -Oceanography visualisation examples -=================================== -""" diff --git a/docs/iris/gallery_code/README.rst b/docs/iris/gallery_code/README.rst new file mode 100644 index 0000000000..02263dc5e5 --- /dev/null +++ b/docs/iris/gallery_code/README.rst @@ -0,0 +1,26 @@ +Gallery +======= + +The gallery is divided into sections as described below. All entries +show the code used to produce the example plot. Additionally, there are links +to download the code, either directly as source or as part of a +`jupyter notebook `_; +these links are at the bottom of each page. + +To view the jupyter notebook locally and experiment with the code, you will +need an environment set up with the appropriate dependencies; see +:ref:`installing_iris` for instructions. +Ensure that ``iris-sample-data`` is installed, as it is used in the gallery. +Additionally, ensure that you install ``jupyter``.
The command to install both +is:: + + conda install -c conda-forge iris-sample-data jupyter + +Once you have downloaded the notebooks (bottom of each gallery page), +you may start the jupyter notebook via:: + + jupyter notebook + +If you wish to contribute to the gallery, see the +:ref:`contributing.documentation.gallery` section of the +:ref:`contributing.documentation`. diff --git a/docs/iris/gallery_code/general/README.rst b/docs/iris/gallery_code/general/README.rst new file mode 100644 index 0000000000..c846755f1e --- /dev/null +++ b/docs/iris/gallery_code/general/README.rst @@ -0,0 +1,2 @@ +General +------- diff --git a/docs/iris/example_code/General/SOI_filtering.py b/docs/iris/gallery_code/general/plot_SOI_filtering.py similarity index 99% rename from docs/iris/example_code/General/SOI_filtering.py rename to docs/iris/gallery_code/general/plot_SOI_filtering.py index caf4810c65..116e819af7 100644 --- a/docs/iris/example_code/General/SOI_filtering.py +++ b/docs/iris/gallery_code/general/plot_SOI_filtering.py @@ -20,8 +20,10 @@ Monthly Weather Review, Vol 112, pp 326-332 """ -import numpy as np + import matplotlib.pyplot as plt +import numpy as np + import iris import iris.plot as iplt diff --git a/docs/iris/example_code/General/anomaly_log_colouring.py b/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py similarity index 97% rename from docs/iris/example_code/General/anomaly_log_colouring.py rename to docs/iris/gallery_code/general/plot_anomaly_log_colouring.py index 95af1e1f61..b0cee818de 100644 --- a/docs/iris/example_code/General/anomaly_log_colouring.py +++ b/docs/iris/gallery_code/general/plot_anomaly_log_colouring.py @@ -13,7 +13,7 @@ To do this, we create a custom value mapping function (normalization) using the matplotlib Norm class `matplotlib.colours.SymLogNorm -`_. +`_. We use this to make a cell-filled pseudocolour plot with a colorbar. NOTE: By "pseudocolour", we mean that each data point is drawn as a "cell" @@ -27,12 +27,14 @@ See also: http://en.wikipedia.org/wiki/False_color#Pseudocolor. 
""" + import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import matplotlib.colors as mcols + import iris import iris.coord_categorisation import iris.plot as iplt -import matplotlib.pyplot as plt -import matplotlib.colors as mcols def main(): diff --git a/docs/iris/example_code/General/coriolis_plot.py b/docs/iris/gallery_code/general/plot_coriolis.py similarity index 99% rename from docs/iris/example_code/General/coriolis_plot.py rename to docs/iris/gallery_code/general/plot_coriolis.py index 7999e5385f..cc67d1267c 100644 --- a/docs/iris/example_code/General/coriolis_plot.py +++ b/docs/iris/gallery_code/general/plot_coriolis.py @@ -9,11 +9,13 @@ """ import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import numpy as np + + import iris from iris.coord_systems import GeogCS import iris.plot as iplt -import matplotlib.pyplot as plt -import numpy as np def main(): diff --git a/docs/iris/example_code/General/cross_section.py b/docs/iris/gallery_code/general/plot_cross_section.py similarity index 100% rename from docs/iris/example_code/General/cross_section.py rename to docs/iris/gallery_code/general/plot_cross_section.py diff --git a/docs/iris/example_code/General/custom_aggregation.py b/docs/iris/gallery_code/general/plot_custom_aggregation.py similarity index 99% rename from docs/iris/example_code/General/custom_aggregation.py rename to docs/iris/gallery_code/general/plot_custom_aggregation.py index 2e73aa277a..9c847be779 100644 --- a/docs/iris/example_code/General/custom_aggregation.py +++ b/docs/iris/gallery_code/general/plot_custom_aggregation.py @@ -13,6 +13,7 @@ certain temperature over a spell of 5 years or more. """ + import matplotlib.pyplot as plt import numpy as np diff --git a/docs/iris/example_code/General/custom_file_loading.py b/docs/iris/gallery_code/general/plot_custom_file_loading.py similarity index 100% rename from docs/iris/example_code/General/custom_file_loading.py rename to docs/iris/gallery_code/general/plot_custom_file_loading.py index 406995d94b..0d130374a7 100644 --- a/docs/iris/example_code/General/custom_file_loading.py +++ b/docs/iris/gallery_code/general/plot_custom_file_loading.py @@ -54,13 +54,13 @@ The cube returned from the load function is then used to produce a plot. """ + import datetime +from cf_units import Unit, CALENDAR_GREGORIAN import matplotlib.pyplot as plt import numpy as np -from cf_units import Unit, CALENDAR_GREGORIAN - import iris import iris.coords as icoords import iris.coord_systems as icoord_systems diff --git a/docs/iris/example_code/General/global_map.py b/docs/iris/gallery_code/general/plot_global_map.py similarity index 99% rename from docs/iris/example_code/General/global_map.py rename to docs/iris/gallery_code/general/plot_global_map.py index 72e8f28743..41fd226921 100644 --- a/docs/iris/example_code/General/global_map.py +++ b/docs/iris/gallery_code/general/plot_global_map.py @@ -6,6 +6,7 @@ title and the labels for the axes are automatically derived from the metadata. 
""" + import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/iris/example_code/General/inset_plot.py b/docs/iris/gallery_code/general/plot_inset.py similarity index 99% rename from docs/iris/example_code/General/inset_plot.py rename to docs/iris/gallery_code/general/plot_inset.py index 4735706ef7..46f5dc5d0f 100644 --- a/docs/iris/example_code/General/inset_plot.py +++ b/docs/iris/gallery_code/general/plot_inset.py @@ -8,10 +8,11 @@ """ +import cartopy.crs as ccrs import matplotlib.pyplot as plt import numpy as np + import iris -import cartopy.crs as ccrs import iris.quickplot as qplt import iris.plot as iplt diff --git a/docs/iris/example_code/General/lineplot_with_legend.py b/docs/iris/gallery_code/general/plot_lineplot_with_legend.py similarity index 99% rename from docs/iris/example_code/General/lineplot_with_legend.py rename to docs/iris/gallery_code/general/plot_lineplot_with_legend.py index aed636489e..5641b9c4d0 100644 --- a/docs/iris/example_code/General/lineplot_with_legend.py +++ b/docs/iris/gallery_code/general/plot_lineplot_with_legend.py @@ -3,6 +3,7 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ """ + import matplotlib.pyplot as plt import iris diff --git a/docs/iris/example_code/General/polar_stereo.py b/docs/iris/gallery_code/general/plot_polar_stereo.py similarity index 86% rename from docs/iris/example_code/General/polar_stereo.py rename to docs/iris/gallery_code/general/plot_polar_stereo.py index ac8c757ed9..bd4a11923d 100644 --- a/docs/iris/example_code/General/polar_stereo.py +++ b/docs/iris/gallery_code/general/plot_polar_stereo.py @@ -15,7 +15,7 @@ def main(): - file_path = iris.sample_data_path("polar_stereo.grib2") + file_path = iris.sample_data_path("toa_brightness_stereographic.nc") cube = iris.load_cube(file_path) qplt.contourf(cube) ax = plt.gca() diff --git a/docs/iris/example_code/General/polynomial_fit.py b/docs/iris/gallery_code/general/plot_polynomial_fit.py similarity index 100% rename from docs/iris/example_code/General/polynomial_fit.py rename to docs/iris/gallery_code/general/plot_polynomial_fit.py diff --git a/docs/iris/example_code/General/projections_and_annotations.py b/docs/iris/gallery_code/general/plot_projections_and_annotations.py similarity index 99% rename from docs/iris/example_code/General/projections_and_annotations.py rename to docs/iris/gallery_code/general/plot_projections_and_annotations.py index 4f85c43835..e59bb236d7 100644 --- a/docs/iris/example_code/General/projections_and_annotations.py +++ b/docs/iris/gallery_code/general/plot_projections_and_annotations.py @@ -13,11 +13,13 @@ We plot these over a specified region, in two different map projections. """ + import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import numpy as np + import iris import iris.plot as iplt -import numpy as np -import matplotlib.pyplot as plt # Define a Cartopy 'ordinary' lat-lon coordinate reference system. 
diff --git a/docs/iris/example_code/General/rotated_pole_mapping.py b/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py similarity index 99% rename from docs/iris/example_code/General/rotated_pole_mapping.py rename to docs/iris/gallery_code/general/plot_rotated_pole_mapping.py index b6a18cac92..063fe93674 100644 --- a/docs/iris/example_code/General/rotated_pole_mapping.py +++ b/docs/iris/gallery_code/general/plot_rotated_pole_mapping.py @@ -11,13 +11,14 @@ * Non native projection and a Natural Earth shaded relief image underlay """ + import cartopy.crs as ccrs import matplotlib.pyplot as plt import iris +import iris.analysis.cartography import iris.plot as iplt import iris.quickplot as qplt -import iris.analysis.cartography def main(): diff --git a/docs/iris/gallery_code/meteorology/README.rst b/docs/iris/gallery_code/meteorology/README.rst new file mode 100644 index 0000000000..e8e902b498 --- /dev/null +++ b/docs/iris/gallery_code/meteorology/README.rst @@ -0,0 +1,3 @@ +Meteorology +----------- + diff --git a/docs/iris/example_code/Meteorology/COP_1d_plot.py b/docs/iris/gallery_code/meteorology/plot_COP_1d.py similarity index 100% rename from docs/iris/example_code/Meteorology/COP_1d_plot.py rename to docs/iris/gallery_code/meteorology/plot_COP_1d.py index 9b95192381..2f93627b77 100644 --- a/docs/iris/example_code/Meteorology/COP_1d_plot.py +++ b/docs/iris/gallery_code/meteorology/plot_COP_1d.py @@ -28,14 +28,15 @@ can be found in :ref:`cube-statistics`. """ -import numpy as np + import matplotlib.pyplot as plt +import numpy as np + import iris +import iris.analysis.cartography import iris.plot as iplt import iris.quickplot as qplt -import iris.analysis.cartography - def main(): # Load data into three Cubes, one for each set of NetCDF files. @@ -93,6 +94,7 @@ def main(): time=lambda cell: 1860 <= cell.point.year <= 1999 ) observed = a1b_mean.extract(constraint) + # Assert that this data set is the same as the e1 scenario: # they share data up to the 1999 cut off. assert np.all(np.isclose(observed.data, e1_mean.extract(constraint).data)) @@ -105,9 +107,7 @@ def main(): plt.title("North American mean air temperature", fontsize=18) plt.xlabel("Time / year") - plt.grid() - iplt.show() diff --git a/docs/iris/example_code/Meteorology/COP_maps.py b/docs/iris/gallery_code/meteorology/plot_COP_maps.py similarity index 99% rename from docs/iris/example_code/Meteorology/COP_maps.py rename to docs/iris/gallery_code/meteorology/plot_COP_maps.py index 840c371c14..a8e6055a77 100644 --- a/docs/iris/example_code/Meteorology/COP_maps.py +++ b/docs/iris/gallery_code/meteorology/plot_COP_maps.py @@ -21,7 +21,9 @@ doi:10.1029/2009EO210001. 
""" + import os.path + import matplotlib.pyplot as plt import numpy as np diff --git a/docs/iris/example_code/Meteorology/TEC.py b/docs/iris/gallery_code/meteorology/plot_TEC.py similarity index 99% rename from docs/iris/example_code/Meteorology/TEC.py rename to docs/iris/gallery_code/meteorology/plot_TEC.py index 8320af90e9..df2e29ef19 100644 --- a/docs/iris/example_code/Meteorology/TEC.py +++ b/docs/iris/gallery_code/meteorology/plot_TEC.py @@ -34,6 +34,7 @@ def main(): plt.ylabel("latitude / degrees") plt.gca().stock_img() plt.gca().coastlines() + iplt.show() diff --git a/docs/iris/example_code/Meteorology/deriving_phenomena.py b/docs/iris/gallery_code/meteorology/plot_deriving_phenomena.py similarity index 99% rename from docs/iris/example_code/Meteorology/deriving_phenomena.py rename to docs/iris/gallery_code/meteorology/plot_deriving_phenomena.py index 7b3f50a8a5..0bb1fa53a4 100644 --- a/docs/iris/example_code/Meteorology/deriving_phenomena.py +++ b/docs/iris/gallery_code/meteorology/plot_deriving_phenomena.py @@ -9,6 +9,7 @@ plot. """ + import matplotlib.pyplot as plt import matplotlib.ticker diff --git a/docs/iris/example_code/Meteorology/hovmoller.py b/docs/iris/gallery_code/meteorology/plot_hovmoller.py similarity index 99% rename from docs/iris/example_code/Meteorology/hovmoller.py rename to docs/iris/gallery_code/meteorology/plot_hovmoller.py index d8954d775f..9f18b8021e 100644 --- a/docs/iris/example_code/Meteorology/hovmoller.py +++ b/docs/iris/gallery_code/meteorology/plot_hovmoller.py @@ -8,8 +8,9 @@ temperature. """ -import matplotlib.pyplot as plt + import matplotlib.dates as mdates +import matplotlib.pyplot as plt import iris import iris.plot as iplt diff --git a/docs/iris/example_code/Meteorology/lagged_ensemble.py b/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py similarity index 98% rename from docs/iris/example_code/Meteorology/lagged_ensemble.py rename to docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py index 298d178a1e..cb82a663d4 100644 --- a/docs/iris/example_code/Meteorology/lagged_ensemble.py +++ b/docs/iris/gallery_code/meteorology/plot_lagged_ensemble.py @@ -17,6 +17,7 @@ model, from each ensemble member. 
""" + import matplotlib.pyplot as plt import numpy as np @@ -40,7 +41,7 @@ def realization_metadata(cube, field, fname): import iris.coords realization_coord = iris.coords.AuxCoord( - np.int32(realization_number), "realization" + np.int32(realization_number), "realization", units="1" ) cube.add_aux_coord(realization_coord) diff --git a/docs/iris/example_code/Meteorology/wind_speed.py b/docs/iris/gallery_code/meteorology/plot_wind_speed.py similarity index 99% rename from docs/iris/example_code/Meteorology/wind_speed.py rename to docs/iris/gallery_code/meteorology/plot_wind_speed.py index 2d8081158c..6844d3874c 100644 --- a/docs/iris/example_code/Meteorology/wind_speed.py +++ b/docs/iris/gallery_code/meteorology/plot_wind_speed.py @@ -11,6 +11,8 @@ """ +import cartopy.crs as ccrs +import cartopy.feature as cfeat import matplotlib.pyplot as plt import numpy as np @@ -18,9 +20,6 @@ import iris.coord_categorisation import iris.quickplot as qplt -import cartopy.feature as cfeat -import cartopy.crs as ccrs - def main(): # Load the u and v components of wind from a pp file diff --git a/docs/iris/gallery_code/oceanography/README.rst b/docs/iris/gallery_code/oceanography/README.rst new file mode 100644 index 0000000000..0f3adf906b --- /dev/null +++ b/docs/iris/gallery_code/oceanography/README.rst @@ -0,0 +1,3 @@ +Oceanography +------------ + diff --git a/docs/iris/example_code/Oceanography/atlantic_profiles.py b/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py similarity index 99% rename from docs/iris/example_code/Oceanography/atlantic_profiles.py rename to docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py index 8a541c11fa..a7e82c34f5 100644 --- a/docs/iris/example_code/Oceanography/atlantic_profiles.py +++ b/docs/iris/gallery_code/oceanography/plot_atlantic_profiles.py @@ -15,10 +15,12 @@ depth values intuitively increase downward on the y-axis. """ + +import matplotlib.pyplot as plt + import iris import iris.iterate import iris.plot as iplt -import matplotlib.pyplot as plt def main(): @@ -56,6 +58,7 @@ def main(): ax1.set_ylabel("Depth / m") for ticklabel in ax1.get_xticklabels(): ticklabel.set_color(temperature_color) + # To plot salinity in the same axes we use twiny(). We'll use a different # color to identify salinity. salinity_color = (0.6, 0.1, 0.15) diff --git a/docs/iris/example_code/Oceanography/load_nemo.py b/docs/iris/gallery_code/oceanography/plot_load_nemo.py similarity index 99% rename from docs/iris/example_code/Oceanography/load_nemo.py rename to docs/iris/gallery_code/oceanography/plot_load_nemo.py index 645617f600..5f2b72c956 100644 --- a/docs/iris/example_code/Oceanography/load_nemo.py +++ b/docs/iris/gallery_code/oceanography/plot_load_nemo.py @@ -7,12 +7,14 @@ different time dimensions in these files can prevent Iris from concatenating them without the intervention shown here. 
""" + from __future__ import unicode_literals +import matplotlib.pyplot as plt + import iris import iris.plot as iplt import iris.quickplot as qplt -import matplotlib.pyplot as plt from iris.util import promote_aux_coord_to_dim_coord @@ -57,6 +59,7 @@ def main(): cube.long_name.capitalize(), lat_string, lon_string ) ) + iplt.show() diff --git a/docs/iris/example_code/General/orca_projection.py b/docs/iris/gallery_code/oceanography/plot_orca_projection.py similarity index 100% rename from docs/iris/example_code/General/orca_projection.py rename to docs/iris/gallery_code/oceanography/plot_orca_projection.py index bf2498c229..627be8591b 100644 --- a/docs/iris/example_code/General/orca_projection.py +++ b/docs/iris/gallery_code/oceanography/plot_orca_projection.py @@ -12,9 +12,9 @@ """ +import cartopy.crs as ccrs import matplotlib.pyplot as plt -import cartopy.crs as ccrs import iris import iris.analysis.cartography import iris.plot as iplt diff --git a/docs/iris/example_tests/__init__.py b/docs/iris/gallery_tests/__init__.py similarity index 100% rename from docs/iris/example_tests/__init__.py rename to docs/iris/gallery_tests/__init__.py diff --git a/docs/iris/example_tests/extest_util.py b/docs/iris/gallery_tests/gallerytest_util.py similarity index 84% rename from docs/iris/example_tests/extest_util.py rename to docs/iris/gallery_tests/gallerytest_util.py index c96f47ae50..3ec18d0169 100644 --- a/docs/iris/example_tests/extest_util.py +++ b/docs/iris/gallery_tests/gallerytest_util.py @@ -6,7 +6,7 @@ """ Provides context managers which are fundamental to the ability -to run the example tests. +to run the gallery tests. """ @@ -23,26 +23,26 @@ import iris.quickplot as qplt -EXAMPLE_DIRECTORY = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "example_code" +GALLERY_DIRECTORY = os.path.join( + os.path.dirname(os.path.dirname(__file__)), "gallery_code" ) -EXAMPLE_DIRECTORIES = [ - os.path.join(EXAMPLE_DIRECTORY, the_dir) - for the_dir in os.listdir(EXAMPLE_DIRECTORY) +GALLERY_DIRECTORIES = [ + os.path.join(GALLERY_DIRECTORY, the_dir) + for the_dir in os.listdir(GALLERY_DIRECTORY) ] @contextlib.contextmanager -def add_examples_to_path(): +def add_gallery_to_path(): """ - Creates a context manager which can be used to add the iris examples - to the PYTHONPATH. The examples are only importable throughout the lifetime + Creates a context manager which can be used to add the iris gallery + to the PYTHONPATH. The gallery entries are only importable throughout the lifetime of this context manager. """ orig_sys_path = sys.path sys.path = sys.path[:] - sys.path += EXAMPLE_DIRECTORIES + sys.path += GALLERY_DIRECTORIES yield sys.path = orig_sys_path diff --git a/docs/iris/example_tests/test_COP_1d_plot.py b/docs/iris/gallery_tests/test_plot_COP_1d.py similarity index 70% rename from docs/iris/example_tests/test_COP_1d_plot.py rename to docs/iris/gallery_tests/test_plot_COP_1d.py index d0e989a3f2..7ad59ba10b 100644 --- a/docs/iris/example_tests/test_COP_1d_plot.py +++ b/docs/iris/gallery_tests/test_plot_COP_1d.py @@ -8,22 +8,22 @@ # importing anything else. 
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestCOP1DPlot(tests.GraphicsTest): - """Test the COP_1d_plot example code.""" + """Test the COP_1d_plot gallery code.""" - def test_COP_1d_plot(self): + def test_plot_COP_1d(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import COP_1d_plot + with add_gallery_to_path(): + import plot_COP_1d with show_replaced_by_check_graphic(self): - COP_1d_plot.main() + plot_COP_1d.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_COP_maps.py b/docs/iris/gallery_tests/test_plot_COP_maps.py similarity index 70% rename from docs/iris/example_tests/test_COP_maps.py rename to docs/iris/gallery_tests/test_plot_COP_maps.py index 9db5060c89..5252ddfe6f 100644 --- a/docs/iris/example_tests/test_COP_maps.py +++ b/docs/iris/gallery_tests/test_plot_COP_maps.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestCOPMaps(tests.GraphicsTest): - """Test the COP_maps example code.""" + """Test the COP_maps gallery code.""" - def test_cop_maps(self): + def test_plot_cop_maps(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import COP_maps + with add_gallery_to_path(): + import plot_COP_maps with show_replaced_by_check_graphic(self): - COP_maps.main() + plot_COP_maps.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_SOI_filtering.py b/docs/iris/gallery_tests/test_plot_SOI_filtering.py similarity index 68% rename from docs/iris/example_tests/test_SOI_filtering.py rename to docs/iris/gallery_tests/test_plot_SOI_filtering.py index 2d791567b0..384a44ebd8 100644 --- a/docs/iris/example_tests/test_SOI_filtering.py +++ b/docs/iris/gallery_tests/test_plot_SOI_filtering.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestSOIFiltering(tests.GraphicsTest): - """Test the SOI_filtering example code.""" + """Test the SOI_filtering gallery code.""" - def test_soi_filtering(self): + def test_plot_soi_filtering(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import SOI_filtering + with add_gallery_to_path(): + import plot_SOI_filtering with show_replaced_by_check_graphic(self): - SOI_filtering.main() + plot_SOI_filtering.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_TEC.py b/docs/iris/gallery_tests/test_plot_TEC.py similarity index 71% rename from docs/iris/example_tests/test_TEC.py rename to docs/iris/gallery_tests/test_plot_TEC.py index 4bcd70f9f5..2852ab06b9 100644 --- a/docs/iris/example_tests/test_TEC.py +++ b/docs/iris/gallery_tests/test_plot_TEC.py @@ -8,22 +8,22 @@ # importing anything else. 
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestTEC(tests.GraphicsTest): - """Test the TEC example code.""" + """Test the TEC gallery code.""" - def test_TEC(self): + def test_plot_TEC(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import TEC + with add_gallery_to_path(): + import plot_TEC with show_replaced_by_check_graphic(self): - TEC.main() + plot_TEC.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_anomaly_log_colouring.py b/docs/iris/gallery_tests/test_plot_anomaly_log_colouring.py similarity index 66% rename from docs/iris/example_tests/test_anomaly_log_colouring.py rename to docs/iris/gallery_tests/test_plot_anomaly_log_colouring.py index d0f07b02c4..eaae11f6b5 100644 --- a/docs/iris/example_tests/test_anomaly_log_colouring.py +++ b/docs/iris/gallery_tests/test_plot_anomaly_log_colouring.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestAnomalyLogColouring(tests.GraphicsTest): - """Test the anomaly colouring example code.""" + """Test the anomaly colouring gallery code.""" - def test_anomaly_log_colouring(self): + def test_plot_anomaly_log_colouring(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import anomaly_log_colouring + with add_gallery_to_path(): + import plot_anomaly_log_colouring with show_replaced_by_check_graphic(self): - anomaly_log_colouring.main() + plot_anomaly_log_colouring.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_atlantic_profiles.py b/docs/iris/gallery_tests/test_plot_atlantic_profiles.py similarity index 67% rename from docs/iris/example_tests/test_atlantic_profiles.py rename to docs/iris/gallery_tests/test_plot_atlantic_profiles.py index d85dc72c2c..b69408337b 100644 --- a/docs/iris/example_tests/test_atlantic_profiles.py +++ b/docs/iris/gallery_tests/test_plot_atlantic_profiles.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestAtlanticProfiles(tests.GraphicsTest): - """Test the atlantic_profiles example code.""" + """Test the atlantic_profiles gallery code.""" - def test_atlantic_profiles(self): + def test_plot_atlantic_profiles(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import atlantic_profiles + with add_gallery_to_path(): + import plot_atlantic_profiles with show_replaced_by_check_graphic(self): - atlantic_profiles.main() + plot_atlantic_profiles.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_coriolis_plot.py b/docs/iris/gallery_tests/test_plot_coriolis.py similarity index 59% rename from docs/iris/example_tests/test_coriolis_plot.py rename to docs/iris/gallery_tests/test_plot_coriolis.py index e61fdce81d..2e4cea8a74 100644 --- a/docs/iris/example_tests/test_coriolis_plot.py +++ b/docs/iris/gallery_tests/test_plot_coriolis.py @@ -9,18 +9,18 @@ import iris.tests as tests -from . import extest_util +from . 
import extest_util +from . import gallerytest_util -with extest_util.add_examples_to_path(): - import coriolis_plot +with gallerytest_util.add_gallery_to_path(): + import plot_coriolis class TestCoriolisPlot(tests.GraphicsTest): - """Test the Coriolis Plot example code.""" + """Test the Coriolis Plot gallery code.""" - def test_coriolis_plot(self): - with extest_util.show_replaced_by_check_graphic(self): - coriolis_plot.main() + def test_plot_coriolis(self): + with gallerytest_util.show_replaced_by_check_graphic(self): + plot_coriolis.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_cross_section.py b/docs/iris/gallery_tests/test_plot_cross_section.py similarity index 68% rename from docs/iris/example_tests/test_cross_section.py rename to docs/iris/gallery_tests/test_plot_cross_section.py index 7fe13d825f..4b92f5f5fe 100644 --- a/docs/iris/example_tests/test_cross_section.py +++ b/docs/iris/gallery_tests/test_plot_cross_section.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestCrossSection(tests.GraphicsTest): - """Test the cross_section example code.""" + """Test the cross_section gallery code.""" - def test_cross_section(self): + def test_plot_cross_section(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import cross_section + with add_gallery_to_path(): + import plot_cross_section with show_replaced_by_check_graphic(self): - cross_section.main() + plot_cross_section.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_custom_aggregation.py b/docs/iris/gallery_tests/test_plot_custom_aggregation.py similarity index 67% rename from docs/iris/example_tests/test_custom_aggregation.py rename to docs/iris/gallery_tests/test_plot_custom_aggregation.py index 130f46d847..b674f401b4 100644 --- a/docs/iris/example_tests/test_custom_aggregation.py +++ b/docs/iris/gallery_tests/test_plot_custom_aggregation.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestCustomAggregation(tests.GraphicsTest): - """Test the custom aggregation example code.""" + """Test the custom aggregation gallery code.""" - def test_custom_aggregation(self): + def test_plot_custom_aggregation(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import custom_aggregation + with add_gallery_to_path(): + import plot_custom_aggregation with show_replaced_by_check_graphic(self): - custom_aggregation.main() + plot_custom_aggregation.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_custom_file_loading.py b/docs/iris/gallery_tests/test_plot_custom_file_loading.py similarity index 67% rename from docs/iris/example_tests/test_custom_file_loading.py rename to docs/iris/gallery_tests/test_plot_custom_file_loading.py index 9c466c53d6..d580ac9d01 100644 --- a/docs/iris/example_tests/test_custom_file_loading.py +++ b/docs/iris/gallery_tests/test_plot_custom_file_loading.py @@ -8,22 +8,22 @@ # importing anything else.
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestCustomFileLoading(tests.GraphicsTest): - """Test the custom_file_loading example code.""" + """Test the custom_file_loading gallery code.""" - def test_custom_file_loading(self): + def test_plot_custom_file_loading(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import custom_file_loading + with add_gallery_to_path(): + import plot_custom_file_loading with show_replaced_by_check_graphic(self): - custom_file_loading.main() + plot_custom_file_loading.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_deriving_phenomena.py b/docs/iris/gallery_tests/test_plot_deriving_phenomena.py similarity index 67% rename from docs/iris/example_tests/test_deriving_phenomena.py rename to docs/iris/gallery_tests/test_plot_deriving_phenomena.py index 63cbf40ec0..b7378da9df 100644 --- a/docs/iris/example_tests/test_deriving_phenomena.py +++ b/docs/iris/gallery_tests/test_plot_deriving_phenomena.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestDerivingPhenomena(tests.GraphicsTest): - """Test the deriving_phenomena example code.""" + """Test the deriving_phenomena gallery code.""" - def test_deriving_phenomena(self): + def test_plot_deriving_phenomena(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import deriving_phenomena + with add_gallery_to_path(): + import plot_deriving_phenomena with show_replaced_by_check_graphic(self): - deriving_phenomena.main() + plot_deriving_phenomena.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_hovmoller.py b/docs/iris/gallery_tests/test_plot_global_map.py similarity index 69% rename from docs/iris/example_tests/test_hovmoller.py rename to docs/iris/gallery_tests/test_plot_global_map.py index b492baebbc..ece1c3a361 100644 --- a/docs/iris/example_tests/test_hovmoller.py +++ b/docs/iris/gallery_tests/test_plot_global_map.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestGlobalMap(tests.GraphicsTest): - """Test the hovmoller example code.""" + """Test the global_map gallery code.""" - def test_hovmoller(self): + def test_plot_global_map(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import hovmoller + with add_gallery_to_path(): + import plot_global_map with show_replaced_by_check_graphic(self): - hovmoller.main() + plot_global_map.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_global_map.py b/docs/iris/gallery_tests/test_plot_hovmoller.py similarity index 69% rename from docs/iris/example_tests/test_global_map.py rename to docs/iris/gallery_tests/test_plot_hovmoller.py index 1ec2a47ef6..23fb741e44 100644 --- a/docs/iris/example_tests/test_global_map.py +++ b/docs/iris/gallery_tests/test_plot_hovmoller.py @@ -8,22 +8,22 @@ # importing anything else. 
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestGlobalMap(tests.GraphicsTest): - """Test the global_map example code.""" + """Test the hovmoller gallery code.""" - def test_global_map(self): + def test_plot_hovmoller(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import global_map + with add_gallery_to_path(): + import plot_hovmoller with show_replaced_by_check_graphic(self): - global_map.main() + plot_hovmoller.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_inset_plot.py b/docs/iris/gallery_tests/test_plot_inset.py similarity index 70% rename from docs/iris/example_tests/test_inset_plot.py rename to docs/iris/gallery_tests/test_plot_inset.py index 58ef63bcac..e77b629c44 100644 --- a/docs/iris/example_tests/test_inset_plot.py +++ b/docs/iris/gallery_tests/test_plot_inset.py @@ -9,22 +9,22 @@ import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestInsetPlot(tests.GraphicsTest): - """Test the inset plot example code.""" + """Test the inset plot gallery code.""" - def test_inset_plot(self): + def test_plot_inset(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import inset_plot + with add_gallery_to_path(): + import plot_inset with show_replaced_by_check_graphic(self): - inset_plot.main() + plot_inset.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_lagged_ensemble.py b/docs/iris/gallery_tests/test_plot_lagged_ensemble.py similarity index 68% rename from docs/iris/example_tests/test_lagged_ensemble.py rename to docs/iris/gallery_tests/test_plot_lagged_ensemble.py index ecce499dc7..386ad7353c 100644 --- a/docs/iris/example_tests/test_lagged_ensemble.py +++ b/docs/iris/gallery_tests/test_plot_lagged_ensemble.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestLaggedEnsemble(tests.GraphicsTest): - """Test the lagged ensemble example code.""" + """Test the lagged ensemble gallery code.""" - def test_lagged_ensemble(self): + def test_plot_lagged_ensemble(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import lagged_ensemble + with add_gallery_to_path(): + import plot_lagged_ensemble with show_replaced_by_check_graphic(self): - lagged_ensemble.main() + plot_lagged_ensemble.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_lineplot_with_legend.py b/docs/iris/gallery_tests/test_plot_lineplot_with_legend.py similarity index 66% rename from docs/iris/example_tests/test_lineplot_with_legend.py rename to docs/iris/gallery_tests/test_plot_lineplot_with_legend.py index ca246b178a..edb4d7d305 100644 --- a/docs/iris/example_tests/test_lineplot_with_legend.py +++ b/docs/iris/gallery_tests/test_plot_lineplot_with_legend.py @@ -8,22 +8,22 @@ # importing anything else. 
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestLineplotWithLegend(tests.GraphicsTest): - """Test the lineplot_with_legend example code.""" + """Test the lineplot_with_legend gallery code.""" - def test_lineplot_with_legend(self): + def test_plot_lineplot_with_legend(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import lineplot_with_legend + with add_gallery_to_path(): + import plot_lineplot_with_legend with show_replaced_by_check_graphic(self): - lineplot_with_legend.main() + plot_lineplot_with_legend.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_load_nemo.py b/docs/iris/gallery_tests/test_plot_load_nemo.py similarity index 69% rename from docs/iris/example_tests/test_load_nemo.py rename to docs/iris/gallery_tests/test_plot_load_nemo.py index 3d9b5bba23..58a5bbf72a 100644 --- a/docs/iris/example_tests/test_load_nemo.py +++ b/docs/iris/gallery_tests/test_plot_load_nemo.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestLoadNemo(tests.GraphicsTest): - """Test the load_nemo example code.""" + """Test the load_nemo gallery code.""" - def test_load_nemo(self): + def test_plot_load_nemo(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import load_nemo + with add_gallery_to_path(): + import plot_load_nemo with show_replaced_by_check_graphic(self): - load_nemo.main() + plot_load_nemo.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_orca_projection.py b/docs/iris/gallery_tests/test_plot_orca_projection.py similarity index 68% rename from docs/iris/example_tests/test_orca_projection.py rename to docs/iris/gallery_tests/test_plot_orca_projection.py index 1854f68aa6..2b6fae4b1b 100644 --- a/docs/iris/example_tests/test_orca_projection.py +++ b/docs/iris/gallery_tests/test_plot_orca_projection.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestOrcaProjection(tests.GraphicsTest): - """Test the orca projection example code.""" + """Test the orca projection gallery code.""" - def test_orca_projection(self): + def test_plot_orca_projection(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import orca_projection + with add_gallery_to_path(): + import plot_orca_projection with show_replaced_by_check_graphic(self): - orca_projection.main() + plot_orca_projection.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_polar_stereo.py b/docs/iris/gallery_tests/test_plot_polar_stereo.py similarity index 69% rename from docs/iris/example_tests/test_polar_stereo.py rename to docs/iris/gallery_tests/test_plot_polar_stereo.py index 63581e7707..3cd7dfa482 100644 --- a/docs/iris/example_tests/test_polar_stereo.py +++ b/docs/iris/gallery_tests/test_plot_polar_stereo.py @@ -8,23 +8,22 @@ # importing anything else. 
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) -@tests.skip_grib class TestPolarStereo(tests.GraphicsTest): - """Test the polar_stereo example code.""" + """Test the polar_stereo gallery code.""" - def test_polar_stereo(self): + def test_plot_polar_stereo(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import polar_stereo + with add_gallery_to_path(): + import plot_polar_stereo with show_replaced_by_check_graphic(self): - polar_stereo.main() + plot_polar_stereo.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_polynomial_fit.py b/docs/iris/gallery_tests/test_plot_polynomial_fit.py similarity index 68% rename from docs/iris/example_tests/test_polynomial_fit.py rename to docs/iris/gallery_tests/test_plot_polynomial_fit.py index 6e1b148e19..5b47b46688 100644 --- a/docs/iris/example_tests/test_polynomial_fit.py +++ b/docs/iris/gallery_tests/test_plot_polynomial_fit.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestPolynomialFit(tests.GraphicsTest): - """Test the polynomial_fit example code.""" + """Test the polynomial_fit gallery code.""" - def test_polynomial_fit(self): + def test_plot_polynomial_fit(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import polynomial_fit + with add_gallery_to_path(): + import plot_polynomial_fit with show_replaced_by_check_graphic(self): - polynomial_fit.main() + plot_polynomial_fit.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_projections_and_annotations.py b/docs/iris/gallery_tests/test_plot_projections_and_annotations.py similarity index 65% rename from docs/iris/example_tests/test_projections_and_annotations.py rename to docs/iris/gallery_tests/test_plot_projections_and_annotations.py index f273e040e4..7052414011 100644 --- a/docs/iris/example_tests/test_projections_and_annotations.py +++ b/docs/iris/gallery_tests/test_plot_projections_and_annotations.py @@ -8,22 +8,22 @@ # importing anything else. 
import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestProjectionsAndAnnotations(tests.GraphicsTest): - """Test the atlantic_profiles example code.""" + """Test the projections_and_annotations gallery code.""" - def test_projections_and_annotations(self): + def test_plot_projections_and_annotations(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import projections_and_annotations + with add_gallery_to_path(): + import plot_projections_and_annotations with show_replaced_by_check_graphic(self): - projections_and_annotations.main() + plot_projections_and_annotations.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_rotated_pole_mapping.py b/docs/iris/gallery_tests/test_plot_rotated_pole_mapping.py similarity index 66% rename from docs/iris/example_tests/test_rotated_pole_mapping.py rename to docs/iris/gallery_tests/test_plot_rotated_pole_mapping.py index 4395b0519a..fa11a60a9c 100644 --- a/docs/iris/example_tests/test_rotated_pole_mapping.py +++ b/docs/iris/gallery_tests/test_plot_rotated_pole_mapping.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestRotatedPoleMapping(tests.GraphicsTest): - """Test the rotated_pole_mapping example code.""" + """Test the rotated_pole_mapping gallery code.""" - def test_rotated_pole_mapping(self): + def test_plot_rotated_pole_mapping(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import rotated_pole_mapping + with add_gallery_to_path(): + import plot_rotated_pole_mapping with show_replaced_by_check_graphic(self): - rotated_pole_mapping.main() + plot_rotated_pole_mapping.main() if __name__ == "__main__": diff --git a/docs/iris/example_tests/test_wind_speed.py b/docs/iris/gallery_tests/test_plot_wind_speed.py similarity index 69% rename from docs/iris/example_tests/test_wind_speed.py rename to docs/iris/gallery_tests/test_plot_wind_speed.py index 1cd4402fdb..7a0be601a5 100644 --- a/docs/iris/example_tests/test_wind_speed.py +++ b/docs/iris/gallery_tests/test_plot_wind_speed.py @@ -8,22 +8,22 @@ # importing anything else. import iris.tests as tests -from .extest_util import ( - add_examples_to_path, +from .gallerytest_util import ( + add_gallery_to_path, show_replaced_by_check_graphic, fail_any_deprecation_warnings, ) class TestWindSpeed(tests.GraphicsTest): - """Test the wind_speed example code.""" + """Test the wind_speed gallery code.""" - def test_wind_speed(self): + def test_plot_wind_speed(self): with fail_any_deprecation_warnings(): - with add_examples_to_path(): - import wind_speed + with add_gallery_to_path(): + import plot_wind_speed with show_replaced_by_check_graphic(self): - wind_speed.main() + plot_wind_speed.main() if __name__ == "__main__": diff --git a/docs/iris/src/Makefile b/docs/iris/src/Makefile index 53d224874d..0aa921fd2a 100644 --- a/docs/iris/src/Makefile +++ b/docs/iris/src/Makefile @@ -2,18 +2,18 @@ # # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = -BUILDDIR = ../build -SRCDIR = ./ +BUILDDIR = _build +SRCDIR = . # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest +.PHONY: help clean html html-noplot dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make <target>' where <target> is one of" @@ -35,34 +35,43 @@ help: @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: - -rm -rf $(BUILDDIR)/* - -rm -rf $(SRCDIR)/iris - -rm -rf $(SRCDIR)/examples $(SRCDIR)/_templates/gallery.html $(SRCDIR)/_static/random_image.js $(SRCDIR)/_static/random.js + -rm -rf $(BUILDDIR) + -rm -rf $(SRCDIR)/generated html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html" + +html-noplot: + $(SPHINXBUILD) -D plot_gallery=0 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML (no gallery) pages are in $(BUILDDIR)/html" + +spelling: + $(SPHINXBUILD) -b spelling $(SRCDIR) $(BUILDDIR) + @echo + @echo "Spelling check finished. The results are in $(BUILDDIR)" dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml" singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml" pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo - @echo "Build finished; now you can process the pickle files." + @echo "Build finished; now you can process the pickle files" json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo - @echo "Build finished; now you can process the JSON files." + @echo "Build finished; now you can process the JSON files" htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @@ -91,7 +100,7 @@ devhelp: epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + @echo "Build finished. The epub file is in $(BUILDDIR)/epub" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @@ -104,7 +113,7 @@ latexpdf: latex $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex" text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @@ -114,7 +123,7 @@ text: man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + @echo "Build finished. The manual pages are in $(BUILDDIR)/man" changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes diff --git a/docs/iris/src/_static/copybutton.js b/docs/iris/src/_static/copybutton.js deleted file mode 100644 index 6800c3cb93..0000000000 --- a/docs/iris/src/_static/copybutton.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2013 PSF.
Licensed under the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -// File originates from the cpython source found in Doc/tools/sphinxext/static/copybutton.js - -$(document).ready(function() { - /* Add a [>>>] button on the top-right corner of code samples to hide - * the >>> and ... prompts and the output and thus make the code - * copyable. */ - var div = $('.highlight-python .highlight,' + - '.highlight-python3 .highlight') - var pre = div.find('pre'); - - // get the styles from the current theme - pre.parent().parent().css('position', 'relative'); - var hide_text = 'Hide the prompts and output'; - var show_text = 'Show the prompts and output'; - var border_width = pre.css('border-top-width'); - var border_style = pre.css('border-top-style'); - var border_color = pre.css('border-top-color'); - var button_styles = { - 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0', - 'border-color': border_color, 'border-style': border_style, - 'border-width': border_width, 'color': border_color, 'text-size': '75%', - 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em' - } - - // create and add the button to all the code blocks that contain >>> - div.each(function(index) { - var jthis = $(this); - if (jthis.find('.gp').length > 0) { - var button = $('>>>'); - button.css(button_styles) - button.attr('title', hide_text); - jthis.prepend(button); - } - // tracebacks (.gt) contain bare text elements that need to be - // wrapped in a span to work with .nextUntil() (see later) - jthis.find('pre:has(.gt)').contents().filter(function() { - return ((this.nodeType == 3) && (this.data.trim().length > 0)); - }).wrap(''); - }); - - // define the behavior of the button when it's clicked - $('.copybutton').toggle( - function() { - var button = $(this); - button.parent().find('.go, .gp, .gt').hide(); - button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden'); - button.css('text-decoration', 'line-through'); - button.attr('title', show_text); - }, - function() { - var button = $(this); - button.parent().find('.go, .gp, .gt').show(); - button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible'); - button.css('text-decoration', 'none'); - button.attr('title', hide_text); - }); -}); - diff --git a/docs/iris/src/_static/favicon-16x16.png b/docs/iris/src/_static/favicon-16x16.png deleted file mode 100644 index e2ea456770..0000000000 Binary files a/docs/iris/src/_static/favicon-16x16.png and /dev/null differ diff --git a/docs/iris/src/_static/favicon-32x32.png b/docs/iris/src/_static/favicon-32x32.png deleted file mode 100644 index 2f90c60eb5..0000000000 Binary files a/docs/iris/src/_static/favicon-32x32.png and /dev/null differ diff --git a/docs/iris/src/_static/favicon.ico b/docs/iris/src/_static/favicon.ico new file mode 100644 index 0000000000..0e5f0492b4 Binary files /dev/null and b/docs/iris/src/_static/favicon.ico differ diff --git a/docs/iris/src/_static/iris-logo-title.png b/docs/iris/src/_static/iris-logo-title.png new file mode 100644 index 0000000000..e517aa7784 Binary files /dev/null and b/docs/iris/src/_static/iris-logo-title.png differ diff --git a/docs/iris/src/_static/iris-logo-title.svg b/docs/iris/src/_static/iris-logo-title.svg new file mode 100644 index 0000000000..60ba0a1118 --- /dev/null +++ b/docs/iris/src/_static/iris-logo-title.svg @@ -0,0 +1,89 @@ + + + + + + + + + + image/svg+xml + + + + + + + + Iris + + diff --git a/docs/iris/src/_static/iris_colour_logo_centred.png 
b/docs/iris/src/_static/iris_colour_logo_centred.png deleted file mode 100755 index 2a1bebc5f3..0000000000 Binary files a/docs/iris/src/_static/iris_colour_logo_centred.png and /dev/null differ diff --git a/docs/iris/src/_static/jquery.cycle.all.latest.js b/docs/iris/src/_static/jquery.cycle.all.latest.js deleted file mode 100644 index 75d7ab98f8..0000000000 --- a/docs/iris/src/_static/jquery.cycle.all.latest.js +++ /dev/null @@ -1,1331 +0,0 @@ -/*! - * jQuery Cycle Plugin (with Transition Definitions) - * Examples and documentation at: http://jquery.malsup.com/cycle/ - * Copyright (c) 2007-2010 M. Alsup - * Version: 2.88 (08-JUN-2010) - * Dual licensed under the MIT and GPL licenses. - * http://jquery.malsup.com/license.html - * Requires: jQuery v1.2.6 or later - */ -;(function($) { - -var ver = '2.88'; - -// if $.support is not defined (pre jQuery 1.3) add what I need -if ($.support == undefined) { - $.support = { - opacity: !($.browser.msie) - }; -} - -function debug(s) { - if ($.fn.cycle.debug) - log(s); -} -function log() { - if (window.console && window.console.log) - window.console.log('[cycle] ' + Array.prototype.join.call(arguments,' ')); -}; - -// the options arg can be... -// a number - indicates an immediate transition should occur to the given slide index -// a string - 'pause', 'resume', 'toggle', 'next', 'prev', 'stop', 'destroy' or the name of a transition effect (ie, 'fade', 'zoom', etc) -// an object - properties to control the slideshow -// -// the arg2 arg can be... -// the name of an fx (only used in conjunction with a numeric value for 'options') -// the value true (only used in first arg == 'resume') and indicates -// that the resume should occur immediately (not wait for next timeout) - -$.fn.cycle = function(options, arg2) { - var o = { s: this.selector, c: this.context }; - - // in 1.3+ we can fix mistakes with the ready state - if (this.length === 0 && options != 'stop') { - if (!$.isReady && o.s) { - log('DOM not ready, queuing slideshow'); - $(function() { - $(o.s,o.c).cycle(options,arg2); - }); - return this; - } - // is your DOM ready? http://docs.jquery.com/Tutorials:Introducing_$(document).ready() - log('terminating; zero elements found by selector' + ($.isReady ? '' : ' (DOM not ready)')); - return this; - } - - // iterate the matched nodeset - return this.each(function() { - var opts = handleArguments(this, options, arg2); - if (opts === false) - return; - - opts.updateActivePagerLink = opts.updateActivePagerLink || $.fn.cycle.updateActivePagerLink; - - // stop existing slideshow for this container (if there is one) - if (this.cycleTimeout) - clearTimeout(this.cycleTimeout); - this.cycleTimeout = this.cyclePause = 0; - - var $cont = $(this); - var $slides = opts.slideExpr ? $(opts.slideExpr, this) : $cont.children(); - var els = $slides.get(); - if (els.length < 2) { - log('terminating; too few slides: ' + els.length); - return; - } - - var opts2 = buildOptions($cont, $slides, els, opts, o); - if (opts2 === false) - return; - - var startTime = opts2.continuous ? 
10 : getTimeout(els[opts2.currSlide], els[opts2.nextSlide], opts2, !opts2.rev); - - // if it's an auto slideshow, kick it off - if (startTime) { - startTime += (opts2.delay || 0); - if (startTime < 10) - startTime = 10; - debug('first timeout: ' + startTime); - this.cycleTimeout = setTimeout(function(){go(els,opts2,0,(!opts2.rev && !opts.backwards))}, startTime); - } - }); -}; - -// process the args that were passed to the plugin fn -function handleArguments(cont, options, arg2) { - if (cont.cycleStop == undefined) - cont.cycleStop = 0; - if (options === undefined || options === null) - options = {}; - if (options.constructor == String) { - switch(options) { - case 'destroy': - case 'stop': - var opts = $(cont).data('cycle.opts'); - if (!opts) - return false; - cont.cycleStop++; // callbacks look for change - if (cont.cycleTimeout) - clearTimeout(cont.cycleTimeout); - cont.cycleTimeout = 0; - $(cont).removeData('cycle.opts'); - if (options == 'destroy') - destroy(opts); - return false; - case 'toggle': - cont.cyclePause = (cont.cyclePause === 1) ? 0 : 1; - checkInstantResume(cont.cyclePause, arg2, cont); - return false; - case 'pause': - cont.cyclePause = 1; - return false; - case 'resume': - cont.cyclePause = 0; - checkInstantResume(false, arg2, cont); - return false; - case 'prev': - case 'next': - var opts = $(cont).data('cycle.opts'); - if (!opts) { - log('options not found, "prev/next" ignored'); - return false; - } - $.fn.cycle[options](opts); - return false; - default: - options = { fx: options }; - }; - return options; - } - else if (options.constructor == Number) { - // go to the requested slide - var num = options; - options = $(cont).data('cycle.opts'); - if (!options) { - log('options not found, can not advance slide'); - return false; - } - if (num < 0 || num >= options.elements.length) { - log('invalid slide index: ' + num); - return false; - } - options.nextSlide = num; - if (cont.cycleTimeout) { - clearTimeout(cont.cycleTimeout); - cont.cycleTimeout = 0; - } - if (typeof arg2 == 'string') - options.oneTimeFx = arg2; - go(options.elements, options, 1, num >= options.currSlide); - return false; - } - return options; - - function checkInstantResume(isPaused, arg2, cont) { - if (!isPaused && arg2 === true) { // resume now! - var options = $(cont).data('cycle.opts'); - if (!options) { - log('options not found, can not resume'); - return false; - } - if (cont.cycleTimeout) { - clearTimeout(cont.cycleTimeout); - cont.cycleTimeout = 0; - } - go(options.elements, options, 1, (!opts.rev && !opts.backwards)); - } - } -}; - -function removeFilter(el, opts) { - if (!$.support.opacity && opts.cleartype && el.style.filter) { - try { el.style.removeAttribute('filter'); } - catch(smother) {} // handle old opera versions - } -}; - -// unbind event handlers -function destroy(opts) { - if (opts.next) - $(opts.next).unbind(opts.prevNextEvent); - if (opts.prev) - $(opts.prev).unbind(opts.prevNextEvent); - - if (opts.pager || opts.pagerAnchorBuilder) - $.each(opts.pagerAnchors || [], function() { - this.unbind().remove(); - }); - opts.pagerAnchors = null; - if (opts.destroy) // callback - opts.destroy(opts); -}; - -// one-time initialization -function buildOptions($cont, $slides, els, options, o) { - // support metadata plugin (v1.0 and v2.0) - var opts = $.extend({}, $.fn.cycle.defaults, options || {}, $.metadata ? $cont.metadata() : $.meta ? 
$cont.data() : {}); - if (opts.autostop) - opts.countdown = opts.autostopCount || els.length; - - var cont = $cont[0]; - $cont.data('cycle.opts', opts); - opts.$cont = $cont; - opts.stopCount = cont.cycleStop; - opts.elements = els; - opts.before = opts.before ? [opts.before] : []; - opts.after = opts.after ? [opts.after] : []; - opts.after.unshift(function(){ opts.busy=0; }); - - // push some after callbacks - if (!$.support.opacity && opts.cleartype) - opts.after.push(function() { removeFilter(this, opts); }); - if (opts.continuous) - opts.after.push(function() { go(els,opts,0,(!opts.rev && !opts.backwards)); }); - - saveOriginalOpts(opts); - - // clearType corrections - if (!$.support.opacity && opts.cleartype && !opts.cleartypeNoBg) - clearTypeFix($slides); - - // container requires non-static position so that slides can be position within - if ($cont.css('position') == 'static') - $cont.css('position', 'relative'); - if (opts.width) - $cont.width(opts.width); - if (opts.height && opts.height != 'auto') - $cont.height(opts.height); - - if (opts.startingSlide) - opts.startingSlide = parseInt(opts.startingSlide); - else if (opts.backwards) - opts.startingSlide = els.length - 1; - - // if random, mix up the slide array - if (opts.random) { - opts.randomMap = []; - for (var i = 0; i < els.length; i++) - opts.randomMap.push(i); - opts.randomMap.sort(function(a,b) {return Math.random() - 0.5;}); - opts.randomIndex = 1; - opts.startingSlide = opts.randomMap[1]; - } - else if (opts.startingSlide >= els.length) - opts.startingSlide = 0; // catch bogus input - opts.currSlide = opts.startingSlide || 0; - var first = opts.startingSlide; - - // set position and zIndex on all the slides - $slides.css({position: 'absolute', top:0, left:0}).hide().each(function(i) { - var z; - if (opts.backwards) - z = first ? i <= first ? els.length + (i-first) : first-i : els.length-i; - else - z = first ? i >= first ? els.length - (i-first) : first-i : els.length-i; - $(this).css('z-index', z) - }); - - // make sure first slide is visible - $(els[first]).css('opacity',1).show(); // opacity bit needed to handle restart use case - removeFilter(els[first], opts); - - // stretch slides - if (opts.fit && opts.width) - $slides.width(opts.width); - if (opts.fit && opts.height && opts.height != 'auto') - $slides.height(opts.height); - - // stretch container - var reshape = opts.containerResize && !$cont.innerHeight(); - if (reshape) { // do this only if container has no size http://tinyurl.com/da2oa9 - var maxw = 0, maxh = 0; - for(var j=0; j < els.length; j++) { - var $e = $(els[j]), e = $e[0], w = $e.outerWidth(), h = $e.outerHeight(); - if (!w) w = e.offsetWidth || e.width || $e.attr('width') - if (!h) h = e.offsetHeight || e.height || $e.attr('height'); - maxw = w > maxw ? w : maxw; - maxh = h > maxh ? h : maxh; - } - if (maxw > 0 && maxh > 0) - $cont.css({width:maxw+'px',height:maxh+'px'}); - } - - if (opts.pause) - $cont.hover(function(){this.cyclePause++;},function(){this.cyclePause--;}); - - if (supportMultiTransitions(opts) === false) - return false; - - // apparently a lot of people use image slideshows without height/width attributes on the images. - // Cycle 2.50+ requires the sizing info for every slide; this block tries to deal with that. - var requeue = false; - options.requeueAttempts = options.requeueAttempts || 0; - $slides.each(function() { - // try to get height/width of each slide - var $el = $(this); - this.cycleH = (opts.fit && opts.height) ? 
opts.height : ($el.height() || this.offsetHeight || this.height || $el.attr('height') || 0); - this.cycleW = (opts.fit && opts.width) ? opts.width : ($el.width() || this.offsetWidth || this.width || $el.attr('width') || 0); - - if ( $el.is('img') ) { - // sigh.. sniffing, hacking, shrugging... this crappy hack tries to account for what browsers do when - // an image is being downloaded and the markup did not include sizing info (height/width attributes); - // there seems to be some "default" sizes used in this situation - var loadingIE = ($.browser.msie && this.cycleW == 28 && this.cycleH == 30 && !this.complete); - var loadingFF = ($.browser.mozilla && this.cycleW == 34 && this.cycleH == 19 && !this.complete); - var loadingOp = ($.browser.opera && ((this.cycleW == 42 && this.cycleH == 19) || (this.cycleW == 37 && this.cycleH == 17)) && !this.complete); - var loadingOther = (this.cycleH == 0 && this.cycleW == 0 && !this.complete); - // don't requeue for images that are still loading but have a valid size - if (loadingIE || loadingFF || loadingOp || loadingOther) { - if (o.s && opts.requeueOnImageNotLoaded && ++options.requeueAttempts < 100) { // track retry count so we don't loop forever - log(options.requeueAttempts,' - img slide not loaded, requeuing slideshow: ', this.src, this.cycleW, this.cycleH); - setTimeout(function() {$(o.s,o.c).cycle(options)}, opts.requeueTimeout); - requeue = true; - return false; // break each loop - } - else { - log('could not determine size of image: '+this.src, this.cycleW, this.cycleH); - } - } - } - return true; - }); - - if (requeue) - return false; - - opts.cssBefore = opts.cssBefore || {}; - opts.animIn = opts.animIn || {}; - opts.animOut = opts.animOut || {}; - - $slides.not(':eq('+first+')').css(opts.cssBefore); - if (opts.cssFirst) - $($slides[first]).css(opts.cssFirst); - - if (opts.timeout) { - opts.timeout = parseInt(opts.timeout); - // ensure that timeout and speed settings are sane - if (opts.speed.constructor == String) - opts.speed = $.fx.speeds[opts.speed] || parseInt(opts.speed); - if (!opts.sync) - opts.speed = opts.speed / 2; - - var buffer = opts.fx == 'shuffle' ? 500 : 250; - while((opts.timeout - opts.speed) < buffer) // sanitize timeout - opts.timeout += opts.speed; - } - if (opts.easing) - opts.easeIn = opts.easeOut = opts.easing; - if (!opts.speedIn) - opts.speedIn = opts.speed; - if (!opts.speedOut) - opts.speedOut = opts.speed; - - opts.slideCount = els.length; - opts.currSlide = opts.lastSlide = first; - if (opts.random) { - if (++opts.randomIndex == els.length) - opts.randomIndex = 0; - opts.nextSlide = opts.randomMap[opts.randomIndex]; - } - else if (opts.backwards) - opts.nextSlide = opts.startingSlide == 0 ? (els.length-1) : opts.startingSlide-1; - else - opts.nextSlide = opts.startingSlide >= (els.length-1) ? 
0 : opts.startingSlide+1; - - // run transition init fn - if (!opts.multiFx) { - var init = $.fn.cycle.transitions[opts.fx]; - if ($.isFunction(init)) - init($cont, $slides, opts); - else if (opts.fx != 'custom' && !opts.multiFx) { - log('unknown transition: ' + opts.fx,'; slideshow terminating'); - return false; - } - } - - // fire artificial events - var e0 = $slides[first]; - if (opts.before.length) - opts.before[0].apply(e0, [e0, e0, opts, true]); - if (opts.after.length > 1) - opts.after[1].apply(e0, [e0, e0, opts, true]); - - if (opts.next) - $(opts.next).bind(opts.prevNextEvent,function(){return advance(opts,opts.rev?-1:1)}); - if (opts.prev) - $(opts.prev).bind(opts.prevNextEvent,function(){return advance(opts,opts.rev?1:-1)}); - if (opts.pager || opts.pagerAnchorBuilder) - buildPager(els,opts); - - exposeAddSlide(opts, els); - - return opts; -}; - -// save off original opts so we can restore after clearing state -function saveOriginalOpts(opts) { - opts.original = { before: [], after: [] }; - opts.original.cssBefore = $.extend({}, opts.cssBefore); - opts.original.cssAfter = $.extend({}, opts.cssAfter); - opts.original.animIn = $.extend({}, opts.animIn); - opts.original.animOut = $.extend({}, opts.animOut); - $.each(opts.before, function() { opts.original.before.push(this); }); - $.each(opts.after, function() { opts.original.after.push(this); }); -}; - -function supportMultiTransitions(opts) { - var i, tx, txs = $.fn.cycle.transitions; - // look for multiple effects - if (opts.fx.indexOf(',') > 0) { - opts.multiFx = true; - opts.fxs = opts.fx.replace(/\s*/g,'').split(','); - // discard any bogus effect names - for (i=0; i < opts.fxs.length; i++) { - var fx = opts.fxs[i]; - tx = txs[fx]; - if (!tx || !txs.hasOwnProperty(fx) || !$.isFunction(tx)) { - log('discarding unknown transition: ',fx); - opts.fxs.splice(i,1); - i--; - } - } - // if we have an empty list then we threw everything away! - if (!opts.fxs.length) { - log('No valid transitions named; slideshow terminating.'); - return false; - } - } - else if (opts.fx == 'all') { // auto-gen the list of transitions - opts.multiFx = true; - opts.fxs = []; - for (p in txs) { - tx = txs[p]; - if (txs.hasOwnProperty(p) && $.isFunction(tx)) - opts.fxs.push(p); - } - } - if (opts.multiFx && opts.randomizeEffects) { - // munge the fxs array to make effect selection random - var r1 = Math.floor(Math.random() * 20) + 30; - for (i = 0; i < r1; i++) { - var r2 = Math.floor(Math.random() * opts.fxs.length); - opts.fxs.push(opts.fxs.splice(r2,1)[0]); - } - debug('randomized fx sequence: ',opts.fxs); - } - return true; -}; - -// provide a mechanism for adding slides after the slideshow has started -function exposeAddSlide(opts, els) { - opts.addSlide = function(newSlide, prepend) { - var $s = $(newSlide), s = $s[0]; - if (!opts.autostopCount) - opts.countdown++; - els[prepend?'unshift':'push'](s); - if (opts.els) - opts.els[prepend?'unshift':'push'](s); // shuffle needs this - opts.slideCount = els.length; - - $s.css('position','absolute'); - $s[prepend?'prependTo':'appendTo'](opts.$cont); - - if (prepend) { - opts.currSlide++; - opts.nextSlide++; - } - - if (!$.support.opacity && opts.cleartype && !opts.cleartypeNoBg) - clearTypeFix($s); - - if (opts.fit && opts.width) - $s.width(opts.width); - if (opts.fit && opts.height && opts.height != 'auto') - $slides.height(opts.height); - s.cycleH = (opts.fit && opts.height) ? opts.height : $s.height(); - s.cycleW = (opts.fit && opts.width) ? 
opts.width : $s.width(); - - $s.css(opts.cssBefore); - - if (opts.pager || opts.pagerAnchorBuilder) - $.fn.cycle.createPagerAnchor(els.length-1, s, $(opts.pager), els, opts); - - if ($.isFunction(opts.onAddSlide)) - opts.onAddSlide($s); - else - $s.hide(); // default behavior - }; -} - -// reset internal state; we do this on every pass in order to support multiple effects -$.fn.cycle.resetState = function(opts, fx) { - fx = fx || opts.fx; - opts.before = []; opts.after = []; - opts.cssBefore = $.extend({}, opts.original.cssBefore); - opts.cssAfter = $.extend({}, opts.original.cssAfter); - opts.animIn = $.extend({}, opts.original.animIn); - opts.animOut = $.extend({}, opts.original.animOut); - opts.fxFn = null; - $.each(opts.original.before, function() { opts.before.push(this); }); - $.each(opts.original.after, function() { opts.after.push(this); }); - - // re-init - var init = $.fn.cycle.transitions[fx]; - if ($.isFunction(init)) - init(opts.$cont, $(opts.elements), opts); -}; - -// this is the main engine fn, it handles the timeouts, callbacks and slide index mgmt -function go(els, opts, manual, fwd) { - // opts.busy is true if we're in the middle of an animation - if (manual && opts.busy && opts.manualTrump) { - // let manual transitions requests trump active ones - debug('manualTrump in go(), stopping active transition'); - $(els).stop(true,true); - opts.busy = false; - } - // don't begin another timeout-based transition if there is one active - if (opts.busy) { - debug('transition active, ignoring new tx request'); - return; - } - - var p = opts.$cont[0], curr = els[opts.currSlide], next = els[opts.nextSlide]; - - // stop cycling if we have an outstanding stop request - if (p.cycleStop != opts.stopCount || p.cycleTimeout === 0 && !manual) - return; - - // check to see if we should stop cycling based on autostop options - if (!manual && !p.cyclePause && !opts.bounce && - ((opts.autostop && (--opts.countdown <= 0)) || - (opts.nowrap && !opts.random && opts.nextSlide < opts.currSlide))) { - if (opts.end) - opts.end(opts); - return; - } - - // if slideshow is paused, only transition on a manual trigger - var changed = false; - if ((manual || !p.cyclePause) && (opts.nextSlide != opts.currSlide)) { - changed = true; - var fx = opts.fx; - // keep trying to get the slide size if we don't have it yet - curr.cycleH = curr.cycleH || $(curr).height(); - curr.cycleW = curr.cycleW || $(curr).width(); - next.cycleH = next.cycleH || $(next).height(); - next.cycleW = next.cycleW || $(next).width(); - - // support multiple transition types - if (opts.multiFx) { - if (opts.lastFx == undefined || ++opts.lastFx >= opts.fxs.length) - opts.lastFx = 0; - fx = opts.fxs[opts.lastFx]; - opts.currFx = fx; - } - - // one-time fx overrides apply to: $('div').cycle(3,'zoom'); - if (opts.oneTimeFx) { - fx = opts.oneTimeFx; - opts.oneTimeFx = null; - } - - $.fn.cycle.resetState(opts, fx); - - // run the before callbacks - if (opts.before.length) - $.each(opts.before, function(i,o) { - if (p.cycleStop != opts.stopCount) return; - o.apply(next, [curr, next, opts, fwd]); - }); - - // stage the after callacks - var after = function() { - $.each(opts.after, function(i,o) { - if (p.cycleStop != opts.stopCount) return; - o.apply(next, [curr, next, opts, fwd]); - }); - }; - - debug('tx firing; currSlide: ' + opts.currSlide + '; nextSlide: ' + opts.nextSlide); - - // get ready to perform the transition - opts.busy = 1; - if (opts.fxFn) // fx function provided? 
- opts.fxFn(curr, next, opts, after, fwd, manual && opts.fastOnEvent); - else if ($.isFunction($.fn.cycle[opts.fx])) // fx plugin ? - $.fn.cycle[opts.fx](curr, next, opts, after, fwd, manual && opts.fastOnEvent); - else - $.fn.cycle.custom(curr, next, opts, after, fwd, manual && opts.fastOnEvent); - } - - if (changed || opts.nextSlide == opts.currSlide) { - // calculate the next slide - opts.lastSlide = opts.currSlide; - if (opts.random) { - opts.currSlide = opts.nextSlide; - if (++opts.randomIndex == els.length) - opts.randomIndex = 0; - opts.nextSlide = opts.randomMap[opts.randomIndex]; - if (opts.nextSlide == opts.currSlide) - opts.nextSlide = (opts.currSlide == opts.slideCount - 1) ? 0 : opts.currSlide + 1; - } - else if (opts.backwards) { - var roll = (opts.nextSlide - 1) < 0; - if (roll && opts.bounce) { - opts.backwards = !opts.backwards; - opts.nextSlide = 1; - opts.currSlide = 0; - } - else { - opts.nextSlide = roll ? (els.length-1) : opts.nextSlide-1; - opts.currSlide = roll ? 0 : opts.nextSlide+1; - } - } - else { // sequence - var roll = (opts.nextSlide + 1) == els.length; - if (roll && opts.bounce) { - opts.backwards = !opts.backwards; - opts.nextSlide = els.length-2; - opts.currSlide = els.length-1; - } - else { - opts.nextSlide = roll ? 0 : opts.nextSlide+1; - opts.currSlide = roll ? els.length-1 : opts.nextSlide-1; - } - } - } - if (changed && opts.pager) - opts.updateActivePagerLink(opts.pager, opts.currSlide, opts.activePagerClass); - - // stage the next transition - var ms = 0; - if (opts.timeout && !opts.continuous) - ms = getTimeout(els[opts.currSlide], els[opts.nextSlide], opts, fwd); - else if (opts.continuous && p.cyclePause) // continuous shows work off an after callback, not this timer logic - ms = 10; - if (ms > 0) - p.cycleTimeout = setTimeout(function(){ go(els, opts, 0, (!opts.rev && !opts.backwards)) }, ms); -}; - -// invoked after transition -$.fn.cycle.updateActivePagerLink = function(pager, currSlide, clsName) { - $(pager).each(function() { - $(this).children().removeClass(clsName).eq(currSlide).addClass(clsName); - }); -}; - -// calculate timeout value for current transition -function getTimeout(curr, next, opts, fwd) { - if (opts.timeoutFn) { - // call user provided calc fn - var t = opts.timeoutFn.call(curr,curr,next,opts,fwd); - while ((t - opts.speed) < 250) // sanitize timeout - t += opts.speed; - debug('calculated timeout: ' + t + '; speed: ' + opts.speed); - if (t !== false) - return t; - } - return opts.timeout; -}; - -// expose next/prev function, caller must pass in state -$.fn.cycle.next = function(opts) { advance(opts, opts.rev?-1:1); }; -$.fn.cycle.prev = function(opts) { advance(opts, opts.rev?1:-1);}; - -// advance slide forward or back -function advance(opts, val) { - var els = opts.elements; - var p = opts.$cont[0], timeout = p.cycleTimeout; - if (timeout) { - clearTimeout(timeout); - p.cycleTimeout = 0; - } - if (opts.random && val < 0) { - // move back to the previously display slide - opts.randomIndex--; - if (--opts.randomIndex == -2) - opts.randomIndex = els.length-2; - else if (opts.randomIndex == -1) - opts.randomIndex = els.length-1; - opts.nextSlide = opts.randomMap[opts.randomIndex]; - } - else if (opts.random) { - opts.nextSlide = opts.randomMap[opts.randomIndex]; - } - else { - opts.nextSlide = opts.currSlide + val; - if (opts.nextSlide < 0) { - if (opts.nowrap) return false; - opts.nextSlide = els.length - 1; - } - else if (opts.nextSlide >= els.length) { - if (opts.nowrap) return false; - opts.nextSlide = 0; - } - } - - var 
cb = opts.onPrevNextEvent || opts.prevNextClick; // prevNextClick is deprecated - if ($.isFunction(cb)) - cb(val > 0, opts.nextSlide, els[opts.nextSlide]); - go(els, opts, 1, val>=0); - return false; -}; - -function buildPager(els, opts) { - var $p = $(opts.pager); - $.each(els, function(i,o) { - $.fn.cycle.createPagerAnchor(i,o,$p,els,opts); - }); - opts.updateActivePagerLink(opts.pager, opts.startingSlide, opts.activePagerClass); -}; - -$.fn.cycle.createPagerAnchor = function(i, el, $p, els, opts) { - var a; - if ($.isFunction(opts.pagerAnchorBuilder)) { - a = opts.pagerAnchorBuilder(i,el); - debug('pagerAnchorBuilder('+i+', el) returned: ' + a); - } - else - a = ''+(i+1)+''; - - if (!a) - return; - var $a = $(a); - // don't reparent if anchor is in the dom - if ($a.parents('body').length === 0) { - var arr = []; - if ($p.length > 1) { - $p.each(function() { - var $clone = $a.clone(true); - $(this).append($clone); - arr.push($clone[0]); - }); - $a = $(arr); - } - else { - $a.appendTo($p); - } - } - - opts.pagerAnchors = opts.pagerAnchors || []; - opts.pagerAnchors.push($a); - $a.bind(opts.pagerEvent, function(e) { - e.preventDefault(); - opts.nextSlide = i; - var p = opts.$cont[0], timeout = p.cycleTimeout; - if (timeout) { - clearTimeout(timeout); - p.cycleTimeout = 0; - } - var cb = opts.onPagerEvent || opts.pagerClick; // pagerClick is deprecated - if ($.isFunction(cb)) - cb(opts.nextSlide, els[opts.nextSlide]); - go(els,opts,1,opts.currSlide < i); // trigger the trans -// return false; // <== allow bubble - }); - - if ( ! /^click/.test(opts.pagerEvent) && !opts.allowPagerClickBubble) - $a.bind('click.cycle', function(){return false;}); // suppress click - - if (opts.pauseOnPagerHover) - $a.hover(function() { opts.$cont[0].cyclePause++; }, function() { opts.$cont[0].cyclePause--; } ); -}; - -// helper fn to calculate the number of slides between the current and the next -$.fn.cycle.hopsFromLast = function(opts, fwd) { - var hops, l = opts.lastSlide, c = opts.currSlide; - if (fwd) - hops = c > l ? c - l : opts.slideCount - l; - else - hops = c < l ? l - c : l + opts.slideCount - c; - return hops; -}; - -// fix clearType problems in ie6 by setting an explicit bg color -// (otherwise text slides look horrible during a fade transition) -function clearTypeFix($slides) { - debug('applying clearType background-color hack'); - function hex(s) { - s = parseInt(s).toString(16); - return s.length < 2 ? '0'+s : s; - }; - function getBg(e) { - for ( ; e && e.nodeName.toLowerCase() != 'html'; e = e.parentNode) { - var v = $.css(e,'background-color'); - if (v.indexOf('rgb') >= 0 ) { - var rgb = v.match(/\d+/g); - return '#'+ hex(rgb[0]) + hex(rgb[1]) + hex(rgb[2]); - } - if (v && v != 'transparent') - return v; - } - return '#ffffff'; - }; - $slides.each(function() { $(this).css('background-color', getBg(this)); }); -}; - -// reset common props before the next transition -$.fn.cycle.commonReset = function(curr,next,opts,w,h,rev) { - $(opts.elements).not(curr).hide(); - opts.cssBefore.opacity = 1; - opts.cssBefore.display = 'block'; - if (w !== false && next.cycleW > 0) - opts.cssBefore.width = next.cycleW; - if (h !== false && next.cycleH > 0) - opts.cssBefore.height = next.cycleH; - opts.cssAfter = opts.cssAfter || {}; - opts.cssAfter.display = 'none'; - $(curr).css('zIndex',opts.slideCount + (rev === true ? 1 : 0)); - $(next).css('zIndex',opts.slideCount + (rev === true ? 
0 : 1)); -}; - -// the actual fn for effecting a transition -$.fn.cycle.custom = function(curr, next, opts, cb, fwd, speedOverride) { - var $l = $(curr), $n = $(next); - var speedIn = opts.speedIn, speedOut = opts.speedOut, easeIn = opts.easeIn, easeOut = opts.easeOut; - $n.css(opts.cssBefore); - if (speedOverride) { - if (typeof speedOverride == 'number') - speedIn = speedOut = speedOverride; - else - speedIn = speedOut = 1; - easeIn = easeOut = null; - } - var fn = function() {$n.animate(opts.animIn, speedIn, easeIn, cb)}; - $l.animate(opts.animOut, speedOut, easeOut, function() { - if (opts.cssAfter) $l.css(opts.cssAfter); - if (!opts.sync) fn(); - }); - if (opts.sync) fn(); -}; - -// transition definitions - only fade is defined here, transition pack defines the rest -$.fn.cycle.transitions = { - fade: function($cont, $slides, opts) { - $slides.not(':eq('+opts.currSlide+')').css('opacity',0); - opts.before.push(function(curr,next,opts) { - $.fn.cycle.commonReset(curr,next,opts); - opts.cssBefore.opacity = 0; - }); - opts.animIn = { opacity: 1 }; - opts.animOut = { opacity: 0 }; - opts.cssBefore = { top: 0, left: 0 }; - } -}; - -$.fn.cycle.ver = function() { return ver; }; - -// override these globally if you like (they are all optional) -$.fn.cycle.defaults = { - fx: 'fade', // name of transition effect (or comma separated names, ex: 'fade,scrollUp,shuffle') - timeout: 4000, // milliseconds between slide transitions (0 to disable auto advance) - timeoutFn: null, // callback for determining per-slide timeout value: function(currSlideElement, nextSlideElement, options, forwardFlag) - continuous: 0, // true to start next transition immediately after current one completes - speed: 1000, // speed of the transition (any valid fx speed value) - speedIn: null, // speed of the 'in' transition - speedOut: null, // speed of the 'out' transition - next: null, // selector for element to use as event trigger for next slide - prev: null, // selector for element to use as event trigger for previous slide -// prevNextClick: null, // @deprecated; please use onPrevNextEvent instead - onPrevNextEvent: null, // callback fn for prev/next events: function(isNext, zeroBasedSlideIndex, slideElement) - prevNextEvent:'click.cycle',// event which drives the manual transition to the previous or next slide - pager: null, // selector for element to use as pager container - //pagerClick null, // @deprecated; please use onPagerEvent instead - onPagerEvent: null, // callback fn for pager events: function(zeroBasedSlideIndex, slideElement) - pagerEvent: 'click.cycle', // name of event which drives the pager navigation - allowPagerClickBubble: false, // allows or prevents click event on pager anchors from bubbling - pagerAnchorBuilder: null, // callback fn for building anchor links: function(index, DOMelement) - before: null, // transition callback (scope set to element to be shown): function(currSlideElement, nextSlideElement, options, forwardFlag) - after: null, // transition callback (scope set to element that was shown): function(currSlideElement, nextSlideElement, options, forwardFlag) - end: null, // callback invoked when the slideshow terminates (use with autostop or nowrap options): function(options) - easing: null, // easing method for both in and out transitions - easeIn: null, // easing for "in" transition - easeOut: null, // easing for "out" transition - shuffle: null, // coords for shuffle animation, ex: { top:15, left: 200 } - animIn: null, // properties that define how the slide animates in - animOut: 
null, // properties that define how the slide animates out - cssBefore: null, // properties that define the initial state of the slide before transitioning in - cssAfter: null, // properties that defined the state of the slide after transitioning out - fxFn: null, // function used to control the transition: function(currSlideElement, nextSlideElement, options, afterCalback, forwardFlag) - height: 'auto', // container height - startingSlide: 0, // zero-based index of the first slide to be displayed - sync: 1, // true if in/out transitions should occur simultaneously - random: 0, // true for random, false for sequence (not applicable to shuffle fx) - fit: 0, // force slides to fit container - containerResize: 1, // resize container to fit largest slide - pause: 0, // true to enable "pause on hover" - pauseOnPagerHover: 0, // true to pause when hovering over pager link - autostop: 0, // true to end slideshow after X transitions (where X == slide count) - autostopCount: 0, // number of transitions (optionally used with autostop to define X) - delay: 0, // additional delay (in ms) for first transition (hint: can be negative) - slideExpr: null, // expression for selecting slides (if something other than all children is required) - cleartype: !$.support.opacity, // true if clearType corrections should be applied (for IE) - cleartypeNoBg: false, // set to true to disable extra cleartype fixing (leave false to force background color setting on slides) - nowrap: 0, // true to prevent slideshow from wrapping - fastOnEvent: 0, // force fast transitions when triggered manually (via pager or prev/next); value == time in ms - randomizeEffects: 1, // valid when multiple effects are used; true to make the effect sequence random - rev: 0, // causes animations to transition in reverse - manualTrump: true, // causes manual transition to stop an active transition instead of being ignored - requeueOnImageNotLoaded: true, // requeue the slideshow if any image slides are not yet loaded - requeueTimeout: 250, // ms delay for requeue - activePagerClass: 'activeSlide', // class name used for the active pager link - updateActivePagerLink: null, // callback fn invoked to update the active pager link (adds/removes activePagerClass style) - backwards: false // true to start slideshow at last slide and move backwards through the stack -}; - -})(jQuery); - - -/*! - * jQuery Cycle Plugin Transition Definitions - * This script is a plugin for the jQuery Cycle Plugin - * Examples and documentation at: http://malsup.com/jquery/cycle/ - * Copyright (c) 2007-2010 M. Alsup - * Version: 2.72 - * Dual licensed under the MIT and GPL licenses: - * http://www.opensource.org/licenses/mit-license.php - * http://www.gnu.org/licenses/gpl.html - */ -(function($) { - -// -// These functions define one-time slide initialization for the named -// transitions. To save file size feel free to remove any of these that you -// don't need. 
-// -$.fn.cycle.transitions.none = function($cont, $slides, opts) { - opts.fxFn = function(curr,next,opts,after){ - $(next).show(); - $(curr).hide(); - after(); - }; -} - -// scrollUp/Down/Left/Right -$.fn.cycle.transitions.scrollUp = function($cont, $slides, opts) { - $cont.css('overflow','hidden'); - opts.before.push($.fn.cycle.commonReset); - var h = $cont.height(); - opts.cssBefore ={ top: h, left: 0 }; - opts.cssFirst = { top: 0 }; - opts.animIn = { top: 0 }; - opts.animOut = { top: -h }; -}; -$.fn.cycle.transitions.scrollDown = function($cont, $slides, opts) { - $cont.css('overflow','hidden'); - opts.before.push($.fn.cycle.commonReset); - var h = $cont.height(); - opts.cssFirst = { top: 0 }; - opts.cssBefore= { top: -h, left: 0 }; - opts.animIn = { top: 0 }; - opts.animOut = { top: h }; -}; -$.fn.cycle.transitions.scrollLeft = function($cont, $slides, opts) { - $cont.css('overflow','hidden'); - opts.before.push($.fn.cycle.commonReset); - var w = $cont.width(); - opts.cssFirst = { left: 0 }; - opts.cssBefore= { left: w, top: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { left: 0-w }; -}; -$.fn.cycle.transitions.scrollRight = function($cont, $slides, opts) { - $cont.css('overflow','hidden'); - opts.before.push($.fn.cycle.commonReset); - var w = $cont.width(); - opts.cssFirst = { left: 0 }; - opts.cssBefore= { left: -w, top: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { left: w }; -}; -$.fn.cycle.transitions.scrollHorz = function($cont, $slides, opts) { - $cont.css('overflow','hidden').width(); - opts.before.push(function(curr, next, opts, fwd) { - $.fn.cycle.commonReset(curr,next,opts); - opts.cssBefore.left = fwd ? (next.cycleW-1) : (1-next.cycleW); - opts.animOut.left = fwd ? -curr.cycleW : curr.cycleW; - }); - opts.cssFirst = { left: 0 }; - opts.cssBefore= { top: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { top: 0 }; -}; -$.fn.cycle.transitions.scrollVert = function($cont, $slides, opts) { - $cont.css('overflow','hidden'); - opts.before.push(function(curr, next, opts, fwd) { - $.fn.cycle.commonReset(curr,next,opts); - opts.cssBefore.top = fwd ? (1-next.cycleH) : (next.cycleH-1); - opts.animOut.top = fwd ? curr.cycleH : -curr.cycleH; - }); - opts.cssFirst = { top: 0 }; - opts.cssBefore= { left: 0 }; - opts.animIn = { top: 0 }; - opts.animOut = { left: 0 }; -}; - -// slideX/slideY -$.fn.cycle.transitions.slideX = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $(opts.elements).not(curr).hide(); - $.fn.cycle.commonReset(curr,next,opts,false,true); - opts.animIn.width = next.cycleW; - }); - opts.cssBefore = { left: 0, top: 0, width: 0 }; - opts.animIn = { width: 'show' }; - opts.animOut = { width: 0 }; -}; -$.fn.cycle.transitions.slideY = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $(opts.elements).not(curr).hide(); - $.fn.cycle.commonReset(curr,next,opts,true,false); - opts.animIn.height = next.cycleH; - }); - opts.cssBefore = { left: 0, top: 0, height: 0 }; - opts.animIn = { height: 'show' }; - opts.animOut = { height: 0 }; -}; - -// shuffle -$.fn.cycle.transitions.shuffle = function($cont, $slides, opts) { - var i, w = $cont.css('overflow', 'visible').width(); - $slides.css({left: 0, top: 0}); - opts.before.push(function(curr,next,opts) { - $.fn.cycle.commonReset(curr,next,opts,true,true,true); - }); - // only adjust speed once! 
- if (!opts.speedAdjusted) { - opts.speed = opts.speed / 2; // shuffle has 2 transitions - opts.speedAdjusted = true; - } - opts.random = 0; - opts.shuffle = opts.shuffle || {left:-w, top:15}; - opts.els = []; - for (i=0; i < $slides.length; i++) - opts.els.push($slides[i]); - - for (i=0; i < opts.currSlide; i++) - opts.els.push(opts.els.shift()); - - // custom transition fn (hat tip to Benjamin Sterling for this bit of sweetness!) - opts.fxFn = function(curr, next, opts, cb, fwd) { - var $el = fwd ? $(curr) : $(next); - $(next).css(opts.cssBefore); - var count = opts.slideCount; - $el.animate(opts.shuffle, opts.speedIn, opts.easeIn, function() { - var hops = $.fn.cycle.hopsFromLast(opts, fwd); - for (var k=0; k < hops; k++) - fwd ? opts.els.push(opts.els.shift()) : opts.els.unshift(opts.els.pop()); - if (fwd) { - for (var i=0, len=opts.els.length; i < len; i++) - $(opts.els[i]).css('z-index', len-i+count); - } - else { - var z = $(curr).css('z-index'); - $el.css('z-index', parseInt(z)+1+count); - } - $el.animate({left:0, top:0}, opts.speedOut, opts.easeOut, function() { - $(fwd ? this : curr).hide(); - if (cb) cb(); - }); - }); - }; - opts.cssBefore = { display: 'block', opacity: 1, top: 0, left: 0 }; -}; - -// turnUp/Down/Left/Right -$.fn.cycle.transitions.turnUp = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,true,false); - opts.cssBefore.top = next.cycleH; - opts.animIn.height = next.cycleH; - }); - opts.cssFirst = { top: 0 }; - opts.cssBefore = { left: 0, height: 0 }; - opts.animIn = { top: 0 }; - opts.animOut = { height: 0 }; -}; -$.fn.cycle.transitions.turnDown = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,true,false); - opts.animIn.height = next.cycleH; - opts.animOut.top = curr.cycleH; - }); - opts.cssFirst = { top: 0 }; - opts.cssBefore = { left: 0, top: 0, height: 0 }; - opts.animOut = { height: 0 }; -}; -$.fn.cycle.transitions.turnLeft = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,false,true); - opts.cssBefore.left = next.cycleW; - opts.animIn.width = next.cycleW; - }); - opts.cssBefore = { top: 0, width: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { width: 0 }; -}; -$.fn.cycle.transitions.turnRight = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,false,true); - opts.animIn.width = next.cycleW; - opts.animOut.left = curr.cycleW; - }); - opts.cssBefore = { top: 0, left: 0, width: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { width: 0 }; -}; - -// zoom -$.fn.cycle.transitions.zoom = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,false,false,true); - opts.cssBefore.top = next.cycleH/2; - opts.cssBefore.left = next.cycleW/2; - opts.animIn = { top: 0, left: 0, width: next.cycleW, height: next.cycleH }; - opts.animOut = { width: 0, height: 0, top: curr.cycleH/2, left: curr.cycleW/2 }; - }); - opts.cssFirst = { top:0, left: 0 }; - opts.cssBefore = { width: 0, height: 0 }; -}; - -// fadeZoom -$.fn.cycle.transitions.fadeZoom = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,false,false); - opts.cssBefore.left = next.cycleW/2; - opts.cssBefore.top = next.cycleH/2; - opts.animIn = { top: 0, left: 0, width: next.cycleW, height: 
next.cycleH }; - }); - opts.cssBefore = { width: 0, height: 0 }; - opts.animOut = { opacity: 0 }; -}; - -// blindX -$.fn.cycle.transitions.blindX = function($cont, $slides, opts) { - var w = $cont.css('overflow','hidden').width(); - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts); - opts.animIn.width = next.cycleW; - opts.animOut.left = curr.cycleW; - }); - opts.cssBefore = { left: w, top: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { left: w }; -}; -// blindY -$.fn.cycle.transitions.blindY = function($cont, $slides, opts) { - var h = $cont.css('overflow','hidden').height(); - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts); - opts.animIn.height = next.cycleH; - opts.animOut.top = curr.cycleH; - }); - opts.cssBefore = { top: h, left: 0 }; - opts.animIn = { top: 0 }; - opts.animOut = { top: h }; -}; -// blindZ -$.fn.cycle.transitions.blindZ = function($cont, $slides, opts) { - var h = $cont.css('overflow','hidden').height(); - var w = $cont.width(); - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts); - opts.animIn.height = next.cycleH; - opts.animOut.top = curr.cycleH; - }); - opts.cssBefore = { top: h, left: w }; - opts.animIn = { top: 0, left: 0 }; - opts.animOut = { top: h, left: w }; -}; - -// growX - grow horizontally from centered 0 width -$.fn.cycle.transitions.growX = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,false,true); - opts.cssBefore.left = this.cycleW/2; - opts.animIn = { left: 0, width: this.cycleW }; - opts.animOut = { left: 0 }; - }); - opts.cssBefore = { width: 0, top: 0 }; -}; -// growY - grow vertically from centered 0 height -$.fn.cycle.transitions.growY = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,true,false); - opts.cssBefore.top = this.cycleH/2; - opts.animIn = { top: 0, height: this.cycleH }; - opts.animOut = { top: 0 }; - }); - opts.cssBefore = { height: 0, left: 0 }; -}; - -// curtainX - squeeze in both edges horizontally -$.fn.cycle.transitions.curtainX = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,false,true,true); - opts.cssBefore.left = next.cycleW/2; - opts.animIn = { left: 0, width: this.cycleW }; - opts.animOut = { left: curr.cycleW/2, width: 0 }; - }); - opts.cssBefore = { top: 0, width: 0 }; -}; -// curtainY - squeeze in both edges vertically -$.fn.cycle.transitions.curtainY = function($cont, $slides, opts) { - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,true,false,true); - opts.cssBefore.top = next.cycleH/2; - opts.animIn = { top: 0, height: next.cycleH }; - opts.animOut = { top: curr.cycleH/2, height: 0 }; - }); - opts.cssBefore = { left: 0, height: 0 }; -}; - -// cover - curr slide covered by next slide -$.fn.cycle.transitions.cover = function($cont, $slides, opts) { - var d = opts.direction || 'left'; - var w = $cont.css('overflow','hidden').width(); - var h = $cont.height(); - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts); - if (d == 'right') - opts.cssBefore.left = -w; - else if (d == 'up') - opts.cssBefore.top = h; - else if (d == 'down') - opts.cssBefore.top = -h; - else - opts.cssBefore.left = w; - }); - opts.animIn = { left: 0, top: 0}; - opts.animOut = { opacity: 1 }; - opts.cssBefore = { top: 0, left: 0 }; -}; - 
-// uncover - curr slide moves off next slide -$.fn.cycle.transitions.uncover = function($cont, $slides, opts) { - var d = opts.direction || 'left'; - var w = $cont.css('overflow','hidden').width(); - var h = $cont.height(); - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,true,true,true); - if (d == 'right') - opts.animOut.left = w; - else if (d == 'up') - opts.animOut.top = -h; - else if (d == 'down') - opts.animOut.top = h; - else - opts.animOut.left = -w; - }); - opts.animIn = { left: 0, top: 0 }; - opts.animOut = { opacity: 1 }; - opts.cssBefore = { top: 0, left: 0 }; -}; - -// toss - move top slide and fade away -$.fn.cycle.transitions.toss = function($cont, $slides, opts) { - var w = $cont.css('overflow','visible').width(); - var h = $cont.height(); - opts.before.push(function(curr, next, opts) { - $.fn.cycle.commonReset(curr,next,opts,true,true,true); - // provide default toss settings if animOut not provided - if (!opts.animOut.left && !opts.animOut.top) - opts.animOut = { left: w*2, top: -h/2, opacity: 0 }; - else - opts.animOut.opacity = 0; - }); - opts.cssBefore = { left: 0, top: 0 }; - opts.animIn = { left: 0 }; -}; - -// wipe - clip animation -$.fn.cycle.transitions.wipe = function($cont, $slides, opts) { - var w = $cont.css('overflow','hidden').width(); - var h = $cont.height(); - opts.cssBefore = opts.cssBefore || {}; - var clip; - if (opts.clip) { - if (/l2r/.test(opts.clip)) - clip = 'rect(0px 0px '+h+'px 0px)'; - else if (/r2l/.test(opts.clip)) - clip = 'rect(0px '+w+'px '+h+'px '+w+'px)'; - else if (/t2b/.test(opts.clip)) - clip = 'rect(0px '+w+'px 0px 0px)'; - else if (/b2t/.test(opts.clip)) - clip = 'rect('+h+'px '+w+'px '+h+'px 0px)'; - else if (/zoom/.test(opts.clip)) { - var top = parseInt(h/2); - var left = parseInt(w/2); - clip = 'rect('+top+'px '+left+'px '+top+'px '+left+'px)'; - } - } - - opts.cssBefore.clip = opts.cssBefore.clip || clip || 'rect(0px 0px 0px 0px)'; - - var d = opts.cssBefore.clip.match(/(\d+)/g); - var t = parseInt(d[0]), r = parseInt(d[1]), b = parseInt(d[2]), l = parseInt(d[3]); - - opts.before.push(function(curr, next, opts) { - if (curr == next) return; - var $curr = $(curr), $next = $(next); - $.fn.cycle.commonReset(curr,next,opts,true,true,false); - opts.cssAfter.display = 'block'; - - var step = 1, count = parseInt((opts.speedIn / 13)) - 1; - (function f() { - var tt = t ? t - parseInt(step * (t/count)) : 0; - var ll = l ? l - parseInt(step * (l/count)) : 0; - var bb = b < h ? b + parseInt(step * ((h-b)/count || 1)) : h; - var rr = r < w ? r + parseInt(step * ((w-r)/count || 1)) : w; - $next.css({ clip: 'rect('+tt+'px '+rr+'px '+bb+'px '+ll+'px)' }); - (step++ <= count) ? 
setTimeout(f, 13) : $curr.css('display', 'none'); - })(); - }); - opts.cssBefore = { display: 'block', opacity: 1, top: 0, left: 0 }; - opts.animIn = { left: 0 }; - opts.animOut = { left: 0 }; -}; - -})(jQuery); diff --git a/docs/iris/src/_static/logo_banner.png b/docs/iris/src/_static/logo_banner.png deleted file mode 100644 index 14fb3497e9..0000000000 Binary files a/docs/iris/src/_static/logo_banner.png and /dev/null differ diff --git a/docs/iris/src/_static/style.css b/docs/iris/src/_static/style.css deleted file mode 100644 index 69fa84394e..0000000000 --- a/docs/iris/src/_static/style.css +++ /dev/null @@ -1,99 +0,0 @@ -body { - font-family: 'Noto Sans', sans-serif; -} - -.sidebar { z-index: 10; } - -.highlight { background: none; } - -p.hr_p { - overflow: hidden; - text-align: center; -} -p.hr_p a { - font-size: small; - color: #1C86EE; -} -p.hr_p:before, -p.hr_p:after { - background-color: #abc; - border: 1px solid #abc; - content: ""; - display: inline-block; - height: 1px; - position: relative; - vertical-align: middle; - width: 50%; -} -p.hr_p:before { - right: 0.5em; - margin-left: -50%; -} -p.hr_p:after { - left: 0.5em; - margin-right: -50%; -} - -.header-content { - background-color: white; - text-align: left; - padding: 0px; - height: 149px; -} - -.header-content img { - height: 100px; - vertical-align: middle; - float: left; - margin: 20px 2em 0.8em 4%; - padding: 0px; -} - -.header-content .strapline { - display: inline-block; - width: calc(100% - 110px - 2em - 4%); -} - -.strapline p { - font-size: medium; - font-family: 'Alike', serif; - font-weight: bold; - color: #444444; - max-width: 52ch; - margin-top: 0.25em; -} - -.header-content h1 { - font-size: 3.5rem; - font-family: 'Alike', serif; - margin-top: 40px; - padding: 0px; - color: #323232; - padding-bottom: 0.2em; -} - -.header-content h1 span.version { - font-size: 1.5rem; -} - -.github-forkme { - position: absolute; - top: 0; - right: 80px; - border: 0; -} - -/* Take into account the resizing effect of the page (which has a minimum */ -/* width of 740px + 80px margins). 
*/ -@media screen and (max-width: calc(740px + 80px + 80px)) { - .github-forkme { - right: calc(100% - 740px - 80px); - } -} - -@media screen and (max-width: calc(740px + 80px)) { - .github-forkme { - left: calc(740px + 80px - 149px); - right: 0px; - } -} diff --git a/docs/iris/src/_static/theme_override.css b/docs/iris/src/_static/theme_override.css new file mode 100644 index 0000000000..5edc286630 --- /dev/null +++ b/docs/iris/src/_static/theme_override.css @@ -0,0 +1,28 @@ +/* import the standard theme css */ +@import url("css/theme.css"); + +/* now we can add any custom css */ + +/* set the width of the logo */ +.wy-side-nav-search>a img.logo, +.wy-side-nav-search .wy-dropdown>a img.logo { + width: 12rem; +} + +/* color of the logo background in the top left corner */ +.wy-side-nav-search { + background-color: lightgray; +} + +/* color of the font for the version in the top left corner */ +.wy-side-nav-search>div.version { + color: black; + font-weight: bold; +} + +/* Ensures tables do not have width scroll bars */ +table.docutils td { + white-space: unset; + word-wrap: break-word; +} + diff --git a/docs/iris/src/_templates/index.html b/docs/iris/src/_templates/index.html deleted file mode 100644 index c18f0268fa..0000000000 --- a/docs/iris/src/_templates/index.html +++ /dev/null @@ -1,146 +0,0 @@ -{% extends "layout.html" %} -{% set title = 'Iris documentation homepage' %} -{% block extrahead %} -{{ super() }} -{% endblock %} -{% block body %}

- Iris implements a data model based on the CF conventions - giving you a powerful, format-agnostic interface for working with your data. - It excels when working with multi-dimensional Earth Science data, where tabular - representations become unwieldy and inefficient. -

-

- CF Standard names, - units, and coordinate metadata - are built into Iris, giving you a rich and expressive interface for maintaining - an accurate representation of your data. Its treatment of data and - associated metadata as first-class objects includes: -

-
    -
  • a visualisation interface based on matplotlib and - cartopy,
  • -
  • unit conversion,
  • -
  • subsetting and extraction,
  • -
  • merge and concatenate,
  • -
  • aggregations and reductions (including min, max, mean and weighted averages),
  • -
  • interpolation and regridding (including nearest-neighbor, linear and area-weighted), and
  • -
  • operator overloads (+, -, *, /, etc.).
  • -
-

- A number of file formats are recognised by Iris, including CF-compliant NetCDF, GRIB, - and PP, and it has a plugin architecture to allow other formats to be added seamlessly. -

-

- Building upon NumPy and - dask, - Iris scales from efficient single-machine workflows right through to multi-core - clusters and HPC. - Interoperability with packages from the wider scientific Python ecosystem comes from Iris' - use of standard NumPy/dask arrays as its underlying data storage. -

- -
-
-
-
- -
- -
- - -
- -{% endblock %} diff --git a/docs/iris/src/_templates/layout.html b/docs/iris/src/_templates/layout.html index f854455f71..9b4983697e 100644 --- a/docs/iris/src/_templates/layout.html +++ b/docs/iris/src/_templates/layout.html @@ -1,71 +1,25 @@ {% extends "!layout.html" %} -{%- block extrahead %} -{{ super() }} - - - - - - - - - - -{% endblock %} - - -{% block rootrellink %} -
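The new ``theme_override.css`` above only takes effect because the updated ``conf.py`` later in this diff points the theme at it; a minimal sketch of that pairing, using only settings that appear in this diff::

    # conf.py (sketch): wiring sphinx_rtd_theme to the override stylesheet.
    html_theme = "sphinx_rtd_theme"
    html_static_path = ["_static"]      # theme_override.css lives in _static/
    html_style = "theme_override.css"   # imports css/theme.css, then customises it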
diff --git a/docs/iris/src/_templates/layout.html b/docs/iris/src/_templates/layout.html
index f854455f71..9b4983697e 100644
--- a/docs/iris/src/_templates/layout.html
+++ b/docs/iris/src/_templates/layout.html
@@ -1,71 +1,25 @@
 {% extends "!layout.html" %}
-{%- block extrahead %}
-{{ super() }}
-{% endblock %}
-
-{% block rootrellink %}
-home
-examples
-gallery
-contents
-{% endblock %}
-
-{% block relbar1 %}
-Fork Iris on GitHub
-Iris logo
-Iris v3.0
-A powerful, format-agnostic, community-driven Python library for analysing and
-visualising Earth science data.
-
-{{ super() }}
+{% block menu %}
+  {{ super() }}
+
+  {# menu_links and menu_links_name are set in conf.py (html_context) #}
+
+  {% if menu_links %}
+    {% if menu_links_name %}
+      {{ menu_links_name }}
+    {% else %}
+      External links
+    {% endif %}
+    {% for text, link in menu_links %}
+      {{ text }}
+    {% endfor %}
+  {% endif %}
 {% endblock %}
-
-{% block footer %}
-{% endblock %}
diff --git a/docs/iris/src/conf.py b/docs/iris/src/conf.py
index 98c12d2cb2..9b061f5ec6 100644
--- a/docs/iris/src/conf.py
+++ b/docs/iris/src/conf.py
@@ -17,78 +17,65 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import datetime
+# ----------------------------------------------------------------------------
+
+import ntpath
 import os
 import sys
 
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.append(os.path.abspath("sphinxext"))
-
-# add some sample files from the developers guide..
-sys.path.append(os.path.abspath(os.path.join("developers_guide")))
+# function to write useful output to stdout, prefixing the source.
+def autolog(message):
+    print("[{}] {}".format(ntpath.basename(__file__), message))
 
-# -- General configuration -----------------------------------------------------
+# -- Are we running on the readthedocs server, if so do some setup -----------
 
-# Temporary value for use by LaTeX and 'man' output.
-# Deleted at the end of the module.
-_authors = "Iris developers"
+on_rtd = os.environ.get("READTHEDOCS") == "True"
 
-# If your documentation needs a minimal Sphinx version, state it here.
-# needs_sphinx = '1.0'
+if on_rtd:
+    autolog("Build running on READTHEDOCS server")
 
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.coverage",
-    "sphinx.ext.doctest",
-    "sphinx.ext.extlinks",
-    "sphinx.ext.graphviz",
-    "sphinx.ext.imgmath",
-    "sphinx.ext.intersphinx",
-    "matplotlib.sphinxext.mathmpl",
-    "matplotlib.sphinxext.plot_directive",
-    # better class documentation
-    "custom_class_autodoc",
-    # Data instance __repr__ filter.
-    "custom_data_autodoc",
-    "gen_example_directory",
-    "generate_package_rst",
-    "gen_gallery",
-    # Add labels to figures automatically
-    "auto_label_figures",
-]
+    # list all the READTHEDOCS environment variables that may be of use
+    # at some point
+    autolog("Listing all environment variables on the READTHEDOCS server...")
 
-# list of packages to document
-autopackage_name = ["iris"]
+    for item, value in os.environ.items():
+        autolog("[READTHEDOCS] {} = {}".format(item, value))
 
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+
+import datetime
+import warnings
 
-# The suffix of source filenames.
-source_suffix = ".rst"
+# custom sphinx extensions
+sys.path.append(os.path.abspath("sphinxext"))
+
+# add some sample files from the developers guide..
+sys.path.append(os.path.abspath(os.path.join("developers_guide")))
 
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
+# Why isn't the iris path added so it is discoverable too? We don't need to;
+# the sphinxext used to generate the api rst knows where the source is. If it
+# is added then the travis build will likely fail.
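As context for the hunk above: ``autolog`` simply prefixes each message with the name of the emitting file. A standalone sketch of that behaviour (the ``READTHEDOCS`` value is set artificially here for illustration; readthedocs.org sets it for real builds)::

    import ntpath
    import os

    def autolog(message):
        # prefix the message with the basename of the emitting file
        print("[{}] {}".format(ntpath.basename(__file__), message))

    os.environ["READTHEDOCS"] = "True"  # simulated for this sketch
    if os.environ.get("READTHEDOCS") == "True":
        autolog("Build running on READTHEDOCS server")
        # prints e.g.: [sketch.py] Build running on READTHEDOCS server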
-# The master toctree document.
-master_doc = "contents"
+# -- Project information -----------------------------------------------------
 
-# General information about the project.
 project = "Iris"
+
 # define the copyright information for latex builds. Note, for html builds,
 # the copyright exists directly inside "_templates/layout.html"
 upper_copy_year = datetime.datetime.now().year
-copyright = "Copyright Iris contributors"
+copyright = "Iris Contributors"
+author = "Iris Developers"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
-#
+
 import iris
 
 # The short X.Y version.
@@ -100,218 +87,187 @@
 # The full version, including alpha/beta/rc tags.
 release = iris.__version__
 
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-# language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
+autolog("Iris Version = {}".format(version))
+autolog("Iris Release = {}".format(release))
 
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["sphinxext", "build"]
+# -- General configuration ---------------------------------------------------
 
-# The reST default role (used for this markup: `text`) to use for all documents.
-# default_role = None
+# Create a variable that can be inserted in the rst "|copyright_years|".
+# You can add more variables here if needed
+rst_epilog = """
+.. |copyright_years| replace:: {year_range}
+""".format(
+    year_range="2010 - {}".format(upper_copy_year)
+)
 
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# Define the default highlight language. This also allows the >>> removal
-# javascript (copybutton.js) to function.
-highlight_language = "default"
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.todo",
+    "sphinx.ext.duration",
+    "sphinx.ext.coverage",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.doctest",
+    "sphinx.ext.extlinks",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.intersphinx",
+    "sphinx_copybutton",
+    # TODO: Spelling extension disabled until the dependencies can be included
+    # "sphinxcontrib.spelling",
+    "sphinx_gallery.gen_gallery",
+    "matplotlib.sphinxext.mathmpl",
+    "matplotlib.sphinxext.plot_directive",
+    # better api documentation (custom)
+    "custom_class_autodoc",
+    "custom_data_autodoc",
+    "generate_package_rst",
+]
 
-# A list of ignored prefixes for module index sorting.
+# -- spelling extension --------------------------------------------------------
+# See https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html
+spelling_lang = "en_GB"
+# The lines in this file must only use line feeds (no carriage returns).
+spelling_word_list_filename = ["spelling_allow.txt"] +spelling_show_suggestions = False +spelling_show_whole_line = False +spelling_ignore_importable_modules = True +spelling_ignore_python_builtins = True + +# -- copybutton extension ----------------------------------------------------- +# See https://sphinx-copybutton.readthedocs.io/en/latest/ +copybutton_prompt_text = ">>> " + +# sphinx.ext.todo configuration +# See https://www.sphinx-doc.org/en/master/usage/extensions/todo.html +todo_include_todos = True + +# api generation configuration +autodoc_member_order = "groupwise" +autodoc_default_flags = ["show-inheritance"] +autosummary_generate = True +autosummary_imported_members = True +autopackage_name = ["iris"] +autoclass_content = "init" modindex_common_prefix = ["iris"] +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# -- intersphinx extension ---------------------------------------------------- +# See https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html intersphinx_mapping = { "cartopy": ("http://scitools.org.uk/cartopy/docs/latest/", None), - "iris-grib": ("http://iris-grib.readthedocs.io/en/latest/", None), "matplotlib": ("http://matplotlib.org/", None), "numpy": ("http://docs.scipy.org/doc/numpy/", None), "python": ("http://docs.python.org/2.7", None), "scipy": ("http://docs.scipy.org/doc/scipy/reference/", None), } +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# -- plot_directive extension ------------------------------------------------- +# See https://matplotlib.org/3.1.3/devel/plot_directive.html#options +plot_formats = [ + ("png", 100), +] + # -- Extlinks extension ------------------------------------------------------- +# See https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html extlinks = { "issue": ("https://github.com/SciTools/iris/issues/%s", "Issue #"), "pull": ("https://github.com/SciTools/iris/pull/%s", "PR #"), } -# -- Doctest ------------------------------------------------------------------ +# -- Doctest ("make doctest")-------------------------------------------------- doctest_global_setup = "import iris" -# -- Autodoc ------------------------------------------------------------------ - -autodoc_member_order = "groupwise" -autodoc_default_flags = ["show-inheritance"] - -# include the __init__ method when documenting classes -# document the init/new method at the top level of the class documentation rather than displaying the class docstring -autoclass_content = "init" - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = "default" -html_theme = "sphinxdoc" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -html_context = {"copyright_years": "2010 - {}".format(upper_copy_year)} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
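The ``extlinks`` table above is what lets pages write ``:issue:`9999``` and ``:pull:`9999``` instead of full URLs; roughly, Sphinx substitutes the role argument into the ``%s`` of the URL template and prefixes the caption. A simplified sketch of that substitution (not the actual extension code)::

    extlinks = {
        "issue": ("https://github.com/SciTools/iris/issues/%s", "Issue #"),
        "pull": ("https://github.com/SciTools/iris/pull/%s", "PR #"),
    }

    def expand(role, argument):
        # build the display text and target URL much as sphinx.ext.extlinks does
        url_template, caption = extlinks[role]
        return "{}{}".format(caption, argument), url_template % argument

    print(expand("issue", "9999"))
    # ('Issue #9999', 'https://github.com/SciTools/iris/issues/9999')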
-# html_short_title = None +# -- Options for HTML output -------------------------------------------------- -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_logo = "_static/iris-logo-title.png" +html_favicon = "_static/favicon.ico" +html_theme = "sphinx_rtd_theme" + +html_theme_options = { + "display_version": True, + "style_external_links": True, + "logo_only": "True", +} -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None +html_context = { + "copyright_years": "2010 - {}".format(upper_copy_year), + # menu_links and menu_links_name are used in _templates/layout.html + # to include some nice icons. See http://fontawesome.io for a list of + # icons (used in the sphinx_rtd_theme) + "menu_links_name": "Support", + "menu_links": [ + ( + ' Source Code', + "https://github.com/SciTools/iris", + ), + ( + ' Users Google Group', + "https://groups.google.com/forum/#!forum/scitools-iris", + ), + ( + ' Developers Google Group', + "https://groups.google.com/forum/#!forum/scitools-iris-dev", + ), + ( + ' StackOverflow For "How do I?"', + "https://stackoverflow.com/questions/tagged/python-iris", + ), + ( + ' Legacy documentation', + "https://scitools.org.uk/iris/docs/v2.4.0/index.html", + ), + ], +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -html_additional_pages = {"index": "index.html", "gallery": "gallery.html"} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -html_show_sphinx = False - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = "Irisdoc" - -html_use_modindex = False - - -# -- Options for LaTeX output -------------------------------------------------- - -# The paper size ('letter' or 'a4'). -# latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). 
-# latex_font_size = '10pt'
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-    (
-        "contents",
-        "Iris.tex",
-        "Iris Documentation",
-        " \\and ".join(_authors),
-        "manual",
-    ),
+html_style = "theme_override.css"
+
+# url link checker. Some links work but report as broken, let's ignore them.
+# See https://www.sphinx-doc.org/en/1.2/config.html#options-for-the-linkcheck-builder
+linkcheck_ignore = [
+    "https://github.com/SciTools/iris/commit/69597eb3d8501ff16ee3d56aef1f7b8f1c2bb316#diff-1680206bdc5cfaa83e14428f5ba0f848",
+    "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf",
+    "http://code.google.com/p/msysgit/downloads/list",
 ]
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Additional stuff for the LaTeX preamble.
-# latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-latex_elements = {}
-latex_elements["docclass"] = "MO_report"
-
-# -- Options for manual page output --------------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [("index", "iris", "Iris Documentation", _authors, 1)]
-
-##########################
-# plot directive options #
-##########################
-
-plot_formats = [
-    ("png", 100),
-    # ('hires.png', 200), ('pdf', 250)
-]
+# list of sources to exclude from the build.
+exclude_patterns = []
+
+# -- sphinx-gallery config ----------------------------------------------------
+# See https://sphinx-gallery.github.io/stable/configuration.html
+
+sphinx_gallery_conf = {
+    # path to your example scripts
+    "examples_dirs": ["../gallery_code"],
+    # path to where to save gallery generated output
+    "gallery_dirs": ["generated/gallery"],
+    # filename pattern for the files in the gallery
+    "filename_pattern": "/plot_",
+    # filename pattern to ignore in the gallery
+    "ignore_pattern": r"__init__\.py",
+}
 
-# Delete the temporary value.
-del _authors
+# -----------------------------------------------------------------------------
+# Remove matplotlib agg warnings from generated doc when using plt.show
+warnings.filterwarnings(
+    "ignore",
+    category=UserWarning,
+    message="Matplotlib is currently using agg, which is a"
+    " non-GUI backend, so cannot show the figure.",
+)
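Note that Sphinx treats each ``linkcheck_ignore`` entry as a regular expression matched against the start of the URI, so entries containing metacharacters such as ``.`` match slightly more loosely than they look. A quick sketch of the check the linkcheck builder performs::

    import re

    linkcheck_ignore = [
        "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf",
        "http://code.google.com/p/msysgit/downloads/list",
    ]

    def is_ignored(uri):
        # the linkcheck builder skips a hyperlink when any pattern matches it
        return any(re.match(pattern, uri) for pattern in linkcheck_ignore)

    print(is_ignored("http://code.google.com/p/msysgit/downloads/list"))  # True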
diff --git a/docs/iris/src/contents.rst b/docs/iris/src/contents.rst
deleted file mode 100644
index ecaf025a7a..0000000000
--- a/docs/iris/src/contents.rst
+++ /dev/null
@@ -1,32 +0,0 @@
-=====================================
-Iris documentation table of contents
-=====================================
-.. toctree::
-   :maxdepth: 1
-
-   installing.rst
-
-.. toctree::
-   :maxdepth: 3
-
-   userguide/index.rst
-
-.. toctree::
-   :maxdepth: 1
-   :hidden:
-
-   iris/iris.rst
-
-.. toctree::
-   :maxdepth: 2
-
-   whatsnew/index.rst
-
-.. toctree::
-   :maxdepth: 1
-
-   examples/index.rst
-   developers_guide/index.rst
-   whitepapers/index.rst
-   copyright.rst
-
diff --git a/docs/iris/src/copyright.rst b/docs/iris/src/copyright.rst
index 71e860336d..08a40e5a1e 100644
--- a/docs/iris/src/copyright.rst
+++ b/docs/iris/src/copyright.rst
@@ -1,4 +1,4 @@
-==========================================
+
 Iris copyright, licensing and contributors
 ==========================================
@@ -28,7 +28,7 @@ are licensed under the UK's Open Government Licence:
 .. admonition:: Documentation, example and data license
 
-    (C) British Crown Copyright 2019.
+    (C) British Crown Copyright |copyright_years|
 
     You may use and re-use the information featured on this website (not including logos) free of
    charge in any format or medium, under the terms of the
diff --git a/docs/iris/src/developers_guide/code_format.rst b/docs/iris/src/developers_guide/code_format.rst
index 8033babceb..c889146269 100644
--- a/docs/iris/src/developers_guide/code_format.rst
+++ b/docs/iris/src/developers_guide/code_format.rst
@@ -1,6 +1,6 @@
 .. _iris_code_format:
 
-Code Formatting
+Code formatting
 ***************
 
 To enforce a consistent code format throughout Iris, we recommend using
 `pre-commit `_ to run
diff --git a/docs/iris/src/developers_guide/contributing_documentation.rst b/docs/iris/src/developers_guide/contributing_documentation.rst
new file mode 100644
index 0000000000..618e5fbd08
--- /dev/null
+++ b/docs/iris/src/developers_guide/contributing_documentation.rst
@@ -0,0 +1,161 @@
+
+.. toctree::
+   :maxdepth: 2
+
+.. _contributing.documentation:
+
+Contributing to the documentation
+==================================
+
+Documentation is important and we encourage any improvements that can be made.
+If you believe the documentation is not clear please contribute a change to
+improve the documentation for all users.
+
+Any change to the Iris project, whether it is a bugfix, new feature or
+documentation update, must use the :ref:`development-workflow`.
+
+.. contents:: Contents:
+   :local:
+
+
+Requirements
+------------
+
+The documentation uses specific packages that need to be present. Please see
+:ref:`installing_iris` for instructions.
+
+
+.. _contributing.documentation.building:
+
+Building
+--------
+
+The build is run from the documentation directory ``iris/docs/iris/src``. In
+this directory run::
+
+    make html
+
+The build output for the html is found in the ``_build/html`` sub directory.
+When updating the documentation, ensure the html build has *no errors* or
+*warnings*, otherwise it may fail the automated `travis-ci`_ build.
+
+Once the build is complete, if it is rerun it will only rebuild the impacted
+build artefacts, so it should take less time.
+
+There is also an option to perform a build but skip the
+:ref:`contributing.documentation.gallery` creation completely. This can be
+achieved via::
+
+    make html-noplot
+
+If you wish to run a clean build you can run::
+
+    make clean
+    make html
+
+This is useful for a final test before committing your changes.
+
+.. note:: In addition to the automated `travis-ci`_ build of the documentation,
+          the https://readthedocs.org/ service is also used. The configuration
+          of this is held in a file in the root of the
+          `github Iris project `_ named
+          ``.readthedocs.yml``.
+
+.. _travis-ci: https://travis-ci.org/github/SciTools/iris
+.. _contributing.documentation.testing:
+
+Testing
+-------
+
+There are a number of ways to test various aspects of the documentation. The
+``make`` commands shown below can be run in the ``iris/docs/iris`` or
+``iris/docs/iris/src`` directory.
+
+Each :ref:`contributing.documentation.gallery` entry has a corresponding test.
+To run the tests::
+
+    make gallerytest
+
+Many documentation pages include python code that can be run to ensure it
+is still valid::
+
+    make doctest
+
+The hyperlinks in the documentation can be checked automatically.
+If there is a link that is known to work it can be excluded from the checks by
+adding it to the ``linkcheck_ignore`` array that is defined in the
+`conf.py`_. The hyperlink check can be run via::
+
+    make linkcheck
+
+If this fails, check the output for the text **broken** and then correct
+or ignore the url.
+
+.. comment
+    Finally, the spelling in the documentation can be checked automatically via the
+    command::
+
+        make spelling
+
+    The spelling check may pull up many technical abbreviations and acronyms. This
+    can be managed by using an **allow** list in the form of a file. This file,
+    or list of files, is set in the `conf.py`_ using the string list
+    ``spelling_word_list_filename``.
+
+.. note:: All of the above tests are automatically run as part of the
+          `travis-ci`_ automated build.
+
+.. _conf.py: https://github.com/SciTools/iris/blob/master/docs/iris/src/conf.py
+
+
+.. _contributing.documentation.api:
+
+Generating API documentation
+----------------------------
+
+In order to auto generate the API documentation based upon the docstrings, a
+custom set of python scripts is used; these are located in the directory
+``iris/docs/iris/src/sphinxext``. Once the ``make html`` command has been run,
+the output of these scripts can be found in
+``iris/docs/iris/src/_build/generated/api``.
+
+If there is a particularly troublesome module that breaks the ``make html``,
+you can exclude the module from the API documentation. Add the entry to the
+``exclude_modules`` tuple list in the
+``iris/docs/iris/src/sphinxext/generate_package_rst.py`` file.
+
+
+.. _contributing.documentation.gallery:
+
+Gallery
+-------
+
+The Iris :ref:`sphx_glr_generated_gallery` uses a sphinx extension named
+`sphinx-gallery `_
+that auto generates reStructuredText (rst) files based upon a gallery source
+directory that abides by a directory and filename convention.
+
+The code for the gallery entries is in ``iris/docs/iris/gallery_code``.
+Each sub directory in this directory is a sub section of the gallery. The
+respective ``README.rst`` in each folder is included in the gallery output.
+
+For each gallery entry there must be a corresponding test script located in
+``iris/docs/iris/gallery_tests``.
+
+To add an entry to the gallery, simply place your python code into the
+appropriate sub directory and name it with a prefix of ``plot_``. If your
+gallery entry does not fit into any existing sub directories then create a new
+directory and place it in there.
+
+The reStructuredText (rst) output of the gallery is located in
+``iris/docs/iris/src/_build/generated/gallery``.
+
+For more information on the directory structure and options please see the
+`sphinx-gallery getting started
+`_ documentation.
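Given the ``filename_pattern`` of ``/plot_`` in ``conf.py`` above, a minimal hypothetical gallery entry (say ``gallery_code/general/plot_example.py``; the name and content here are illustrative only) needs just a reST docstring title followed by plotting code::

    """
    Minimal gallery entry
    =====================

    sphinx-gallery renders this module docstring as the page text.
    """
    import matplotlib.pyplot as plt

    # Any figure produced here is captured into the generated gallery page.
    plt.plot([0, 1, 2, 3], [0, 1, 4, 9])
    plt.title("Minimal gallery entry")
    plt.show()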
+ + + + diff --git a/docs/iris/src/developers_guide/documenting/docstrings.rst b/docs/iris/src/developers_guide/documenting/docstrings.rst index 4499f3fe34..641bf7717e 100644 --- a/docs/iris/src/developers_guide/documenting/docstrings.rst +++ b/docs/iris/src/developers_guide/documenting/docstrings.rst @@ -1,11 +1,12 @@ -================ - Docstrings -================ +========== +Docstrings +========== - -Guiding principle: Every public object in the Iris package should have an appropriate docstring. +Guiding principle: Every public object in the Iris package should have an +appropriate docstring. This document has been influenced by the following PEP's, + * Attribute Docstrings `PEP-224 `_ * Docstring Conventions `PEP-257 `_ @@ -34,7 +35,7 @@ The multi-line docstring *description section* should expand on what was stated Sample multi-line docstring --------------------------- -Here is a simple example of a standard dosctring: +Here is a simple example of a standard docstring: .. literalinclude:: docstrings_sample_routine.py @@ -57,10 +58,10 @@ Documenting classes =================== The class constructor should be documented in the docstring for its ``__init__`` or ``__new__`` method. Methods should be documented by their own docstring, not in the class header itself. -If a class subclasses another class and its behavior is mostly inherited from that class, its docstring should mention this and summarise the differences. Use the verb "override" to indicate that a subclass method replaces a superclass method and does not call the superclass method; use the verb "extend" to indicate that a subclass method calls the superclass method (in addition to its own behavior). +If a class subclasses another class and its behaviour is mostly inherited from that class, its docstring should mention this and summarise the differences. Use the verb "override" to indicate that a subclass method replaces a superclass method and does not call the superclass method; use the verb "extend" to indicate that a subclass method calls the superclass method (in addition to its own behaviour). -Attribute and Property docstrings +Attribute and property docstrings --------------------------------- Here is a simple example of a class containing an attribute docstring and a property docstring: diff --git a/docs/iris/src/developers_guide/documenting/rest_guide.rst b/docs/iris/src/developers_guide/documenting/rest_guide.rst index 8ce97a3c4a..aadb5ffea4 100644 --- a/docs/iris/src/developers_guide/documenting/rest_guide.rst +++ b/docs/iris/src/developers_guide/documenting/rest_guide.rst @@ -1,28 +1,38 @@ -=============== -reST quickstart -=============== +================ +reST quick start +================ -reST (http://en.wikipedia.org/wiki/ReStructuredText) is a lightweight markup language intended to be highly readable in source format. This guide will cover some of the more frequently used advanced reST markup syntaxes, for the basics of reST the following links may be useful: +reST (http://en.wikipedia.org/wiki/ReStructuredText) is a lightweight markup +language intended to be highly readable in source format. 
This guide will
+cover some of the more frequently used advanced reST markup syntaxes; for the
+basics of reST the following links may be useful:
 
-    * http://sphinx.pocoo.org/rest.html
-    * http://docs.geoserver.org/trunk/en/docguide/sphinx.html
+    * https://www.sphinx-doc.org/en/master/usage/restructuredtext/
     * http://packages.python.org/an_example_pypi_project/sphinx.html
 
 Reference documentation for reST can be found at http://docutils.sourceforge.net/rst.html.
 
 Creating links
 --------------
-Basic links can be created with ```Text of the link `_`` which will look like `Text of the link `_
+Basic links can be created with ```Text of the link `_``
+which will look like `Text of the link `_
 
-Documents in the same project can be cross referenced with the syntax ``:doc:`document_name``` for example, to reference the "docstrings" page ``:doc:`docstrings``` creates the following link :doc:`docstrings`
+Documents in the same project can be cross referenced with the syntax
+``:doc:`document_name```; for example, to reference the "docstrings" page
+``:doc:`docstrings``` creates the following link :doc:`docstrings`
 
-References can be created between sections by first making a "label" where you would like the link to point to ``.. _name_of_reference::`` the appropriate link can now be created with ``:ref:`name_of_reference``` (note the trailing underscore on the label)
+References can be created between sections by first making a "label" where
+you would like the link to point to ``.. _name_of_reference:``; the
+appropriate link can now be created with ``:ref:`name_of_reference```
+(note the leading underscore on the label)
 
-Cross referencing other reference documentation can be achieved with the syntax ``:py:class:`zipfile.ZipFile``` which will result in links such as :py:class:`zipfile.ZipFile` and :py:class:`numpy.ndarray`.
+Cross referencing other reference documentation can be achieved with the
+syntax ``:py:class:`zipfile.ZipFile``` which will result in links such as
+:py:class:`zipfile.ZipFile` and :py:class:`numpy.ndarray`.
diff --git a/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst b/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst
index 203a422457..b4ca483075 100644
--- a/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst
+++ b/docs/iris/src/developers_guide/documenting/whats_new_contributions.rst
@@ -4,21 +4,17 @@
 Contributing a "What's New" entry
 =================================
 
-Iris has an aggregator for building a draft what's new document for each
-release. The draft what's new document is built from contributions by code authors.
-This means contributions to the what's new document are written by the
-developer most familiar with the change made.
+Iris uses a file named ``latest.rst`` to keep a draft of upcoming changes
+that will form the next release. Contributions to the :ref:`iris_whatsnew`
+document are written by the developer most familiar with the change made.
+The contribution should be included as part of the Iris Pull Request that
+introduces the change.
 
-A contribution provides an entry in the what's new document, which describes a
-change that improved Iris in some way. This change may be a new feature in Iris
-or the fix for a bug introduced in a previous release. The contribution should
-be included as part of the Iris Pull Request that introduces the change.
+The ``latest.rst`` and the past release notes are kept in
+``docs/iris/src/whatsnew/``.
-
-When a new release is prepared, the what's new contributions are combined into
-a draft what's new document for the release.
-
-Writing a Contribution
+Writing a contribution
 ======================
 
 As introduced above, a contribution is the description of a change to Iris
@@ -26,98 +22,54 @@
 which improved Iris in some way. As such, a single Iris Pull Request may
 contain multiple changes that are worth highlighting as contributions to the
 what's new document.
 
-Each contribution will ideally be written as a single concise bullet point.
-The content of the bullet point should highlight the change that has been made
-to Iris, targeting an Iris user as the audience.
-
-A contribution is a feature summary by the code author, which avoids the
-release developer having to personally review the change in detail :
-It is not in itself the final documentation content,
-so it does not have to be perfect or complete in every respect.
-
-
-Adding Contribution Files
-=========================
+Each contribution will ideally be written as a single concise bullet point
+in a reStructuredText format with a trailing blank line. For example::
 
-Each release must have a directory called ``contributions_``,
-which should be created following the release of the current version of Iris. Each
-release directory must be placed in ``docs/iris/src/whatsnew/``.
-Contributions to the what's new must be written in markdown and placed into this
-directory in text files. The filename for each item should be structured as follows:
+    * Fixed :issue:`9999`. Lorem ipsum dolor sit amet, consectetur adipiscing
+      elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
-``__.txt``
-
-Category
---------
-The category must be one of the following:
-
-*newfeature*
-  Features that are new or changed to add functionality.
-*bugfix*
-  A bugfix.
-*incompatiblechange*
-  A change that causes an incompatibility with prior versions of Iris.
-*deprecate*
-  Deprecations of functionality.
-*docchange*
-  Changes to documentation.
+Note that this example also cites the related issue; optionally, you may also
+include the pull request using the notation ``:pull:`9999```. Where possible
+do not exceed **column 80** and ensure that any subsequent lines
+of the same bullet point are aligned with the first.
 
-Date
-----
-
-The date must be a hyphen-separated date in the format of:
-
-    * a four digit year,
-    * a three character month name, and
-    * a two digit day.
-
-For example:
-
-    * 2012-Jan-30
-    * 2014-May-03
-    * 2015-Feb-19
-
-Summary
--------
+The content of the bullet point should highlight the change that has been made
+to Iris, targeting an Iris user as the audience.
 
-The summary can be any remaining filename characters, and simply provides a
-short identifying description of the change.
+For inspiration, including how to add links to code, please examine past
+:ref:`iris_whatsnew` entries.
 
-For example:
+.. note:: The reStructuredText syntax will be checked as part of building
+          the documentation. Any warnings should be corrected.
+          `travis-ci`_ will automatically build the documentation when
+          creating a pull request, however you can also manually
+          :ref:`build ` the documentation.
 
-    * whats-new-aggregator
-    * using_mo_pack
-    * correction-to-bilinear-regrid
-    * GRIB2_pdt11
+.. 
_travis-ci: https://travis-ci.org/github/SciTools/iris -Complete Examples ------------------ +Contribution categories +======================= -Some sample what's new contribution filenames: +The structure of the what's new release note should be easy to read by +users. To achieve this several categories may be used. - * bugfix_2015-Aug-18_partial_pp_constraints.txt - * deprecate_2015-Nov-01_unit-module.txt - * incompatiblechange_2015-Oct-12_GRIB_optional_Python3_unavailable.txt - * newfeature_2015-Jul-03_pearsonr_rewrite.txt +*Features* + Features that are new or changed to add functionality. -.. note:: - A test in the standard test suite ensures that all the contents of the - latest contributions directory conform to this naming scheme. +*Bug Fixes* + A bug fix. +*Incompatible Changes* + A change that causes an incompatibility with prior versions of Iris. -Compiling a Draft -================= +*Internal* + Changes to any internal or development related topics, such as testing, + environment dependencies etc -Compiling a draft from the supplied contributions should be done when preparing -a release. Running ``docs/iris/src/whatsnew/aggregate_directory.py`` with the -release number as the argument will create a draft what's new with the name -``.rst`` file for the specified release, by aggregating the individual -contributions from the relevant folder. -Omitting the release number will build the latest version for which a -contributions folder is present. -This command fails if a file with the relevant name already exists. +*Deprecations* + Deprecations of functionality. -The resulting draft document is only a starting point, which the release -developer will then edit to produce the final 'What's new in Iris x.x' -documentation. +*Documentation* + Changes to documentation. diff --git a/docs/iris/src/developers_guide/gitwash/configure_git.rst b/docs/iris/src/developers_guide/gitwash/configure_git.rst index 0e18b666d0..fd3cf0db20 100644 --- a/docs/iris/src/developers_guide/gitwash/configure_git.rst +++ b/docs/iris/src/developers_guide/gitwash/configure_git.rst @@ -55,7 +55,7 @@ In detail user.name and user.email ------------------------ -It is good practice to tell git_ who you are, for labeling any changes +It is good practice to tell git_ who you are, for labelling any changes you make to the code. The simplest way to do this is from the command line:: diff --git a/docs/iris/src/developers_guide/gitwash/development_workflow.rst b/docs/iris/src/developers_guide/gitwash/development_workflow.rst index 4da6b700ba..dee06454a1 100644 --- a/docs/iris/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/iris/src/developers_guide/gitwash/development_workflow.rst @@ -18,7 +18,7 @@ In what follows we'll refer to the upstream iris ``master`` branch, as * When you are starting a new set of changes, fetch any changes from trunk, and start a new *feature branch* from that. * Make a new branch for each separable set of changes |emdash| "one task, one - branch" (`ipython git workflow`_). + branch". * Name your branch for the purpose of the changes - e.g. ``bugfix-for-issue-14`` or ``refactor-database-code``. * If you can possibly avoid it, avoid merging trunk or any other branches into @@ -31,7 +31,7 @@ This way of working helps to keep work well organized, with readable history. This in turn makes it easier for project maintainers (that might be you) to see what you've done, and why you did it. -See `linux git workflow`_ and `ipython git workflow`_ for some explanation. 
+See `linux git workflow`_ for some explanation. Consider deleting your master branch ==================================== @@ -148,7 +148,7 @@ Testing your changes ==================== Once you are happy with your changes, work thorough the :ref:`pr_check` and make sure -your branch passess all the relevant tests. +your branch passes all the relevant tests. Ask for your changes to be reviewed or merged ============================================= diff --git a/docs/iris/src/developers_guide/gitwash/forking_hell.rst b/docs/iris/src/developers_guide/gitwash/forking_hell.rst index 2b38c02736..4b591d7b0e 100644 --- a/docs/iris/src/developers_guide/gitwash/forking_hell.rst +++ b/docs/iris/src/developers_guide/gitwash/forking_hell.rst @@ -1,7 +1,7 @@ .. _forking: ====================================================== -Making your own copy (fork) of iris +Making your own copy (fork) of Iris ====================================================== You need to do this only once. The instructions here are very similar @@ -17,7 +17,7 @@ If you don't have a github account, go to the github page, and make one. You then need to configure your account to allow write access |emdash| see the ``Generating SSH keys`` help on `github help`_. -Create your own forked copy of `iris`_ +Create your own forked copy of `Iris`_ ====================================================== #. Log into your github account. diff --git a/docs/iris/src/developers_guide/gitwash/git_development.rst b/docs/iris/src/developers_guide/gitwash/git_development.rst index c5b910d863..1b4398e132 100644 --- a/docs/iris/src/developers_guide/gitwash/git_development.rst +++ b/docs/iris/src/developers_guide/gitwash/git_development.rst @@ -4,8 +4,6 @@ Git for development ===================== -Contents: - .. toctree:: :maxdepth: 2 diff --git a/docs/iris/src/developers_guide/gitwash/git_install.rst b/docs/iris/src/developers_guide/gitwash/git_install.rst index 3be5149b90..d63f188b2e 100644 --- a/docs/iris/src/developers_guide/gitwash/git_install.rst +++ b/docs/iris/src/developers_guide/gitwash/git_install.rst @@ -7,12 +7,12 @@ Overview ======== -================ ============= +================ =============================== Debian / Ubuntu ``sudo apt-get install git`` Fedora ``sudo yum install git`` Windows Download and install msysGit_ OS X Use the git-osx-installer_ -================ ============= +================ =============================== In detail ========= @@ -21,6 +21,4 @@ See the git page for the most recent information. Have a look at the github install help pages available from `github help`_ -There are good instructions here: http://book.git-scm.com/2_installing_git.html - .. include:: links.inc diff --git a/docs/iris/src/developers_guide/gitwash/git_links.inc b/docs/iris/src/developers_guide/gitwash/git_links.inc index 8e628ae19e..28cae6a025 100644 --- a/docs/iris/src/developers_guide/gitwash/git_links.inc +++ b/docs/iris/src/developers_guide/gitwash/git_links.inc @@ -15,7 +15,6 @@ .. _msysgit: http://code.google.com/p/msysgit/downloads/list .. _git-osx-installer: http://code.google.com/p/git-osx-installer/downloads/list .. _subversion: http://subversion.tigris.org/ -.. _git cheat sheet: http://github.com/guides/git-cheat-sheet .. _pro git book: http://progit.org/ .. _git svn crash course: http://git-scm.com/course/svn.html .. 
_learn.github: http://learn.github.com/ diff --git a/docs/iris/src/developers_guide/gitwash/git_resources.rst b/docs/iris/src/developers_guide/gitwash/git_resources.rst index d18b0ef48b..6f05422771 100644 --- a/docs/iris/src/developers_guide/gitwash/git_resources.rst +++ b/docs/iris/src/developers_guide/gitwash/git_resources.rst @@ -1,7 +1,7 @@ .. _git-resources: ============= -git resources +Git resources ============= Tutorials and summaries @@ -9,8 +9,6 @@ Tutorials and summaries * `github help`_ has an excellent series of how-to guides. * `learn.github`_ has an excellent series of tutorials -* The `pro git book`_ is a good in-depth book on git. -* A `git cheat sheet`_ is a page giving summaries of common commands. * The `git user manual`_ * The `git tutorial`_ * The `git community book`_ @@ -22,7 +20,6 @@ Tutorials and summaries * Fernando Perez' git page |emdash| `Fernando's git page`_ |emdash| many links and tips * A good but technical page on `git concepts`_ -* `git svn crash course`_: git for those of us used to subversion_ Advanced git workflow ===================== @@ -30,7 +27,6 @@ Advanced git workflow There are many ways of working with git; here are some posts on the rules of thumb that other projects have come up with: -* Linus Torvalds on `git management`_ * Linus Torvalds on `linux git workflow`_ . Summary; use the git tools to make the history of your edits as clean as possible; merge from upstream edits as little as possible in branches where you are doing diff --git a/docs/iris/src/developers_guide/gitwash/index.rst b/docs/iris/src/developers_guide/gitwash/index.rst index 35eee1944a..a69515548a 100644 --- a/docs/iris/src/developers_guide/gitwash/index.rst +++ b/docs/iris/src/developers_guide/gitwash/index.rst @@ -3,8 +3,6 @@ Working with *iris* source code ================================================ -Contents: - .. toctree:: :maxdepth: 2 diff --git a/docs/iris/src/developers_guide/graphics_tests.rst b/docs/iris/src/developers_guide/graphics_tests.rst index 2782f319ec..8f63bd3381 100644 --- a/docs/iris/src/developers_guide/graphics_tests.rst +++ b/docs/iris/src/developers_guide/graphics_tests.rst @@ -1,26 +1,29 @@ +:orphan: + .. _developer_graphics_tests: Graphics tests ************** The only practical way of testing plotting functionality is to check actual -output plots. -For this, a basic 'graphics test' assertion operation is provided in the method -:meth:`iris.tests.IrisTest.check_graphic` : This tests plotted output for a -match against a stored reference. -A "graphics test" is any test which employs this. - -At present, such tests include the testing for modules `iris.tests.test_plot` -and `iris.tests.test_quickplot`, all output plots from the gallery examples -(contained in `docs/iris/example_tests`), and a few other 'legacy' style tests -(as described in :ref:`developer_tests`). +output plots. For this, a basic 'graphics test' assertion operation is +provided in the method :meth:`iris.tests.IrisTest.check_graphic` : This +tests plotted output for a match against a stored reference. A +"graphics test" is any test which employs this. + +At present, such tests include the testing for modules ``iris.tests.test_plot`` +and ``iris.tests.test_quickplot``, all output plots from the gallery +contained in ``docs/iris/gallery_tests``, and a few other 'legacy' style tests +as described in :ref:`developer_tests` +. It is conceivable that new 'graphics tests' of this sort can still be added. 
However, as graphics tests are inherently "integration" style rather than true unit tests, results can differ with the installed versions of dependent libraries (see below), so this is not recommended except where no alternative is practical. -Testing actual plot results introduces some significant difficulties : +Testing actual plot results introduces some significant difficulties: + * Graphics tests are inherently 'integration' style tests, so results will often vary with the versions of key dependencies, i.e. the exact versions of third-party modules which are installed : Obviously, results will depend on @@ -36,7 +39,7 @@ Testing actual plot results introduces some significant difficulties : given multiple independent sources of variation. -Graphics Testing Strategy +Graphics testing strategy ========================= In the Iris Travis matrix, and over time, graphics tests must run with @@ -63,8 +66,8 @@ This consists of : existing accepted reference images, for each failing test. -How to Add New 'Acceptable' Result Images to Existing Tests -======================================== +How to add new 'Acceptable' result images to existing tests +=========================================================== When you find that a graphics test in the Iris testing suite has failed, following changes in Iris or the run dependencies, this is the process @@ -109,7 +112,7 @@ you should follow: #. Now re-run the tests. The 'new' result should now be recognised and the relevant test should pass. However, some tests can perform *multiple* graphics - checks within a single testcase function : In those cases, any failing + checks within a single test case function : In those cases, any failing check will prevent the following ones from being run, so a test re-run may encounter further (new) graphical test failures. If that happens, simply repeat the check-and-accept process until all tests pass. diff --git a/docs/iris/src/developers_guide/index.rst b/docs/iris/src/developers_guide/index.rst deleted file mode 100644 index a98a9f0f3a..0000000000 --- a/docs/iris/src/developers_guide/index.rst +++ /dev/null @@ -1,19 +0,0 @@ -.. _userguide-index: - -.. This is the source doc for the user guide - -##################### - Iris developer guide -##################### - - -.. toctree:: - :maxdepth: 3 - - documenting/index.rst - gitwash/index.rst - code_format.rst - pulls.rst - tests.rst - deprecations.rst - release.rst diff --git a/docs/iris/src/developers_guide/pulls.rst b/docs/iris/src/developers_guide/pulls.rst index 6546a15642..366cedd159 100644 --- a/docs/iris/src/developers_guide/pulls.rst +++ b/docs/iris/src/developers_guide/pulls.rst @@ -1,6 +1,7 @@ .. _pr_check: -Pull Request Check List + +Pull request check List *********************** A pull request to a SciTools project master should be ready to merge into the @@ -15,7 +16,7 @@ The check list summarises criteria which will be checked before a pull request is merged. Before submitting a pull request please consider this list. -The Iris Check List +The Iris check list ==================== * Have you provided a helpful description of the Pull Request? @@ -33,17 +34,17 @@ The Iris Check List * Have new tests been provided for all additional functionality? -* Do all modified and new sourcefiles pass PEP8? +* Do all modified and new source files pass PEP8? * PEP8_ is the Python source code style guide. 
* There is a python module for checking pep8 compliance: python-pep8_ - * a standard Iris test checks that all sourcefiles meet PEP8 compliance + * a standard Iris test checks that all source files meet PEP8 compliance (see "iris.tests.test_coding_standards.TestCodeFormat"). -* Do all modified and new sourcefiles have a correct, up-to-date copyright +* Do all modified and new source files have a correct, up-to-date copyright header? - * a standard Iris test checks that all sourcefiles include a copyright + * a standard Iris test checks that all source files include a copyright message, including the correct year of the latest change (see "iris.tests.test_coding_standards.TestLicenseHeaders"). @@ -64,7 +65,7 @@ The Iris Check List * The documentation tests may be run with ``make doctest``, from within the ``./docs/iris`` subdirectory. -* Have you provided a 'whats new' contribution? +* Have you provided a "what's new" contribution? * this should be done for all changes that affect API or behaviour. See :ref:`whats_new_contributions` @@ -75,7 +76,7 @@ The Iris Check List * Do the documentation and code-example tests pass? - * Run with ``make doctest`` and ``make extest``, from within the subdirectory + * Run with ``make doctest`` and ``make gallerytest``, from within the subdirectory ``./docs/iris``. * note that code examples must *not* raise deprecations. This is now checked and will result in an error. @@ -85,8 +86,8 @@ The Iris Check List * ``./.travis.yml`` is used to manage the continuous integration testing. * the files ``./conda-requirements.yml`` and - ``./minimal-conda-requirements.yml`` are used to define the software - environments used, using the conda_ package manager. + ``./minimal-conda-requirements.yml`` are used to define the software + environments used, using the conda_ package manager. * Have you provided updates to supporting projects for test or example data? @@ -108,7 +109,7 @@ The Iris Check List .. _PEP8: http://www.python.org/dev/peps/pep-0008/ .. _python-pep8: https://pypi.python.org/pypi/pep8 -.. _conda: http://conda.readthedocs.io/en/latest/ +.. _conda: https://docs.conda.io/en/latest/ .. _iris-test-data: https://github.com/SciTools/iris-test-data .. _iris-sample-data: https://github.com/SciTools/iris-sample-data .. _test-images-scitools: https://github.com/SciTools/test-images-scitools diff --git a/docs/iris/src/developers_guide/release.rst b/docs/iris/src/developers_guide/release.rst index 437478a6a0..d71f149186 100644 --- a/docs/iris/src/developers_guide/release.rst +++ b/docs/iris/src/developers_guide/release.rst @@ -1,75 +1,165 @@ .. _iris_development_releases: Releases -******** +======== -A release of Iris is a tag on the SciTools/Iris Github repository. +A release of Iris is a `tag on the SciTools/Iris`_ +Github repository. -Release Branch -============== +The summary below is of the main areas that constitute the release. The final +section details the :ref:`iris_development_releases_steps` to take. -Once the features intended for the release are on master, a release branch should be created, in the SciTools/Iris repository. This will have the name: - :literal:`{major release number}.{minor release number}.x` +Release branch +-------------- + +Once the features intended for the release are on master, a release branch +should be created, in the SciTools/Iris repository. 
This will have the name:
+
+    :literal:`v{major release number}.{minor release number}.x`
 
 for example:
 
     :literal:`v1.9.x`
 
-This branch shall be used to finalise the release details in preparation for the release candidate.
+This branch shall be used to finalise the release details in preparation for
+the release candidate.
+
 
-Release Candidate
-=================
+Release candidate
+-----------------
 
-Prior to a release, a release candidate tag may be created, marked as a pre-release in github, with a tag ending with :literal:`rc` followed by a number, e.g.:
+Prior to a release, a release candidate tag may be created, marked as a
+pre-release in github, with a tag ending with :literal:`rc` followed by a
+number, e.g.:
 
     :literal:`v1.9.0rc1`
 
-If created, the pre-release shall be available for at least one week prior to the release being cut. User groups should be notified of the existence of the pre-release and encouraged to test the functionality.
+If created, the pre-release shall be available for a minimum of two weeks
+prior to the release being cut. However, a four week period should be the
+goal, to allow user groups to be notified of the existence of the pre-release
+and encouraged to test the functionality.
 
-A pre-release is expected for a minor release, but not normally provided for a point release.
+A pre-release is expected for a minor release, but will not normally be
+provided for a point release.
+
+If new features are required for a release after a release candidate has been
+cut, a new pre-release shall be issued first.
 
-If new features are required for a release after a release candidate has been cut, a new pre-release shall be issued first.
 
 Documentation
-=============
+-------------
+
+The documentation should include all of the what's new entries for the release.
+This content should be reviewed and adapted as required.
 
-The documentation should include all of the what's new snippets, which must be compiled into a what's new. This content should be reviewed and adapted as required and the snippets removed from the branch to produce a coherent what's new page.
+Steps to achieve this can be found in the :ref:`iris_development_releases_steps`.
 
-Upon release, the documentation shall be added to the SciTools scitools.org.uk github project's gh-pages branch as the latest documentation.
 
-Testing the Conda Recipe
-========================
+The release
+-----------
 
-Before a release is cut, the SciTools conda-recipes-scitools recipe for Iris shall be tested to build the release branch of Iris; this test recipe shall not be merged onto conda-recipes-scitools.
+The final steps are to change the version string in the source of
+:literal:`Iris.__init__.py` and include the release date in the relevant what's
+new page within the documentation.
 
-The Release
-===========
+Once all checks are complete, the release is cut by the creation of a new tag
+in the SciTools Iris repository.
 
-The final steps are to change the version string in the source of :literal:`Iris.__init__.py` and include the release date in the relevant what's new page within the documentation.
-Once all checks are complete, the release is cut by the creation of a new tag in the SciTools Iris repository.
 
+Conda recipe
+------------
 
-Conda Recipe
-============
-
+Once a release is cut, the `Iris feedstock`_ for the conda recipe must be
+updated to build the latest release of Iris and push this artefact to
+`conda forge`_.
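The version-string changes described in "The release" above (and in the maintainer steps below) are one-line edits; a hypothetical sketch of the relevant line in ``lib/iris/__init__.py`` at each stage (exact file layout assumed)::

    # lib/iris/__init__.py (sketch): the string exposed as iris.__version__.
    __version__ = "1.9.0"        # on the v1.9.x branch, at release
    # __version__ = "1.10.dev0"  # on master, after the post-release merge back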
-Once a release is cut, the SciTools conda-recipes-scitools recipe for Iris shall be updated to build the latest release of Iris and push this artefact to anaconda.org. The build and push is all automated as part of the merge process. +.. _Iris feedstock: https://github.com/conda-forge/iris-feedstock/tree/master/recipe +.. _conda forge: https://anaconda.org/conda-forge/iris -Merge Back -========== +Merge back +---------- -After the release is cut, the changes shall be merged back onto the scitools master. +After the release is cut, the changes shall be merged back onto the +SciTools/iris master branch. -To achieve this, first cut a local branch from the release branch, :literal:`{release}.x`. Next add a commit changing the release string to match the release string on scitools/master. -This branch can now be proposed as a pull request to master. This work flow ensures that the commit identifiers are consistent between the :literal:`.x` branch and :literal:`master`. +To achieve this, first cut a local branch from the release branch, +:literal:`{release}.x`. Next add a commit changing the release string to match +the release string on scitools/master. This branch can now be proposed as a +pull request to master. This workflow ensures that the commit identifiers are +consistent between the :literal:`.x` branch and :literal:`master`. -Point Releases -============== -Bug fixes may be implemented and targeted as the :literal:`.x` branch. These should lead to a new point release, another tag. -For example, a fix for a problem with 1.9.0 will be merged into 1.9.x, and then released by tagging 1.9.1. +Point releases +-------------- + +Bug fixes may be implemented and targeted at the :literal:`.x` branch. These +should lead to a new point release, another tag. For example, a fix for a +problem with 1.9.0 will be merged into 1.9.x, and then released by tagging +1.9.1. New features shall not be included in a point release, these are for bug fixes. -A point release does not require a release candidate, but the rest of the release process is to be followed, including the merge back of changes into :literal:`master`. +A point release does not require a release candidate, but the rest of the +release process is to be followed, including the merge back of changes into +:literal:`master`. + + +.. _iris_development_releases_steps: + +Maintainer steps +---------------- + +These steps assume a release for ``v1.9`` is to be created. + +Release steps +~~~~~~~~~~~~~ + +#. Create the branch ``1.9.x`` on the main repo, not in a forked repo, for the + release candidate or release. The only exception is for a point/bugfix + release as it should already exist +#. Update the what's new for the release: + + * Copy ``docs/iris/src/whatsnew/latest.rst`` to a file named + ``v1.9.rst`` + * Delete the ``docs/iris/src/whatsnew/latest.rst`` file so it will not + cause an issue in the build + * In ``v1.9.rst`` update the page title (first line of the file) to show + the date and version in the format of ``v1.9 (DD MMM YYYY)``. For + example ``v1.9 (03 Aug 2020)`` + * Review the file for correctness + * Add ``v1.9.rst`` to git and commit all changes, including removal of + ``latest.rst`` + +#. Update the what's new index ``docs/iris/src/whatsnew/index.rst`` + + * Temporarily remove reference to ``latest.rst`` + * Add a reference to ``v1.9.rst`` to the top of the list + +#. Update the ``Iris.__init__.py`` version string, to ``1.9.0`` +#. Check your changes by building the documentation and viewing the changes +#. 
Once all the above steps are complete, the release is cut, using + the :guilabel:`Draft a new release` button on the + `Iris release page <https://github.com/SciTools/iris/releases>`_ + + +Post release steps +~~~~~~~~~~~~~~~~~~ + +#. Check that the documentation has built on `Read The Docs`_. The build is + triggered by any commit to master. Additionally check that the versions + available in the pop-out menu in the bottom left corner include the new + release version. If it is not present, you will need to configure the + versions available in the **admin** dashboard in Read The Docs. +#. Copy ``docs/iris/src/whatsnew/latest.rst.template`` to + ``docs/iris/src/whatsnew/latest.rst``. This will reset + the file with the ``unreleased`` heading and placeholders for the what's + new headings +#. Add the reference to ``latest.rst`` back into the what's new index + ``docs/iris/src/whatsnew/index.rst`` +#. Update ``Iris.__init__.py`` version string to show as ``1.10.dev0`` +#. Merge back to master + +.. _Read The Docs: https://readthedocs.org/projects/scitools-iris/builds/ +.. _tag on the SciTools/Iris: https://github.com/SciTools/iris/releases diff --git a/docs/iris/src/developers_guide/tests.rst b/docs/iris/src/developers_guide/tests.rst index 417db96f32..0322abfdba 100644 --- a/docs/iris/src/developers_guide/tests.rst +++ b/docs/iris/src/developers_guide/tests.rst @@ -7,6 +7,7 @@ The Iris tests may be run with ``python setup.py test`` which has a command line utility included. There are three categories of tests within Iris: + - Unit tests - Integration tests - Legacy tests diff --git a/docs/iris/src/index.rst b/docs/iris/src/index.rst new file mode 100644 index 0000000000..759f2f0d7e --- /dev/null +++ b/docs/iris/src/index.rst @@ -0,0 +1,89 @@ +Iris Documentation +================== + +.. todolist:: + +**A powerful, format-agnostic, community-driven Python library for analysing and +visualising Earth science data.** + +Iris implements a data model based on the `CF conventions `_ +giving you a powerful, format-agnostic interface for working with your data. +It excels when working with multi-dimensional Earth Science data, where tabular +representations become unwieldy and inefficient. + +`CF Standard names `_, +`units `_, and coordinate metadata +are built into Iris, giving you a rich and expressive interface for maintaining +an accurate representation of your data. Its treatment of data and +associated metadata as first-class objects includes: + +* visualisation interface based on `matplotlib `_ and + `cartopy `_, +* unit conversion, +* subsetting and extraction, +* merge and concatenate, +* aggregations and reductions (including min, max, mean and weighted averages), +* interpolation and regridding (including nearest-neighbor, linear and area-weighted), and +* operator overloads (``+``, ``-``, ``*``, ``/``, etc.). + +A number of file formats are recognised by Iris, including CF-compliant NetCDF, GRIB, +and PP, and it has a plugin architecture to allow other formats to be added seamlessly. + +Building upon `NumPy `_ and +`dask `_, +Iris scales from efficient single-machine workflows right through to multi-core +clusters and HPC. +Interoperability with packages from the wider scientific Python ecosystem comes from Iris' +use of standard NumPy/dask arrays as its underlying data storage. + + +.. toctree:: + :maxdepth: 1 + :caption: Getting started + + installing + generated/gallery/index + + +.. 
toctree:: + :maxdepth: 1 + :caption: User Guide + + userguide/index + userguide/iris_cubes + userguide/loading_iris_cubes + userguide/saving_iris_cubes + userguide/navigating_a_cube + userguide/subsetting_a_cube + userguide/real_and_lazy_data + userguide/plotting_a_cube + userguide/interpolation_and_regridding + userguide/merge_and_concat + userguide/cube_statistics + userguide/cube_maths + userguide/citation + userguide/code_maintenance + + +.. toctree:: + :maxdepth: 1 + :caption: Developers Guide + + developers_guide/contributing_documentation + developers_guide/documenting/index + developers_guide/gitwash/index + developers_guide/code_format + developers_guide/pulls + developers_guide/tests + developers_guide/deprecations + developers_guide/release + generated/api/iris + + +.. toctree:: + :maxdepth: 1 + :caption: Reference + + whatsnew/index + techpapers/index + copyright diff --git a/docs/iris/src/installing.rst b/docs/iris/src/installing.rst index 6b6999ab82..faa46afa50 100644 --- a/docs/iris/src/installing.rst +++ b/docs/iris/src/installing.rst @@ -1,7 +1,6 @@ .. _installing_iris: -**************** Installing Iris -**************** +*************** .. include:: ../../../INSTALL diff --git a/docs/iris/src/spelling_allow.txt b/docs/iris/src/spelling_allow.txt new file mode 100644 index 0000000000..6ef4134699 --- /dev/null +++ b/docs/iris/src/spelling_allow.txt @@ -0,0 +1,356 @@ +Admin +Albers +Arakawa +Arg +Args +Autoscale +Biggus +CF +Cartopy +Checklist +Color +Conda +Constraining +DAP +Dask +Debian +Duchon +EO +Eos +Exner +Fieldsfile +Fieldsfiles +FillValue +Gb +GeogCS +Hovmoller +Jul +Jun +Jupyter +Lanczos +Mappables +Matplotlib +Mb +Modeling +Mollweide +NetCDF +Nino +PPfield +PPfields +Perez +Proj +Quickplot +Regrids +Royer +Scitools +Scitools +Sep +Stehfest +Steroegraphic +Subsetting +TestCodeFormat +TestLicenseHeaders +Torvalds +Trans +Trenberth +Tri +URIs +URLs +Ubuntu +Ugrid +Unidata +Vol +Vuuren +Workflow +Yury +Zaytsev +Zorder +abf +abl +advection +aggregator +aggregators +alphap +ancils +antimeridian +ap +arg +args +arithmetic +arraylike +atol +auditable +aux +basemap +behaviour +betap +bhulev +biggus +blev +boolean +boundpoints +branchname +broadcastable +bugfix +bugfixes +builtin +bulev +carrée +cartesian +celsius +center +centrepoints +cf +cftime +chunksizes +clabel +cmap +cmpt +codebase +color +colorbar +colorbars +complevel +conda +config +constraining +convertor +coord +coords +cs +datafiles +datatype +datetime +datetimes +ddof +deepcopy +deprecations +der +dewpoint +dict +dicts +diff +discontiguities +discontiguous +djf +docstring +docstrings +doi +dom +dropdown +dtype +dtypes +dx +dy +edgecolor +endian +endianness +equirectangular +eta +etc +fh +fieldsfile +fieldsfiles +fileformat +fileformats +filename +filenames +filepath +filespec +fullname +func +geolocations +github +gregorian +grib +gribapi +gridcell +griddata +gridlines +hPa +hashable +hindcast +hyperlink +hyperlinks +idiff +ieee +ifunc +imagehash +init +inline +inplace +int +interable +interpolator +ints +io +isosurfaces +iterable +jja +kwarg +kwargs +landsea +lat +latlon +latlons +lats +lbcode +lbegin +lbext +lbfc +lbft +lblrec +lbmon +lbmond +lbnrec +lbrsvd +lbtim +lbuser +lbvc +lbyr +lbyrd +lh +lhs +linewidth +linted +linting +lon +lons +lt +mam +markup +matplotlib +matplotlibrc +max +mdtol +meaned +mercator +metadata +min +mpl +nanmask +nc +ndarray +neighbor +ness +netCDF +netcdf +netcdftime +nimrod +np +nsigma +numpy +nx +ny +online +orog +paramId +params +parsable +pcolormesh +pdf +placeholders +plugin 
+png +proj +ps +pseudocolor +pseudocolour +pseudocoloured +py +pyplot +quickplot +rST +rc +rd +reST +reStructuredText +rebase +rebases +rebasing +regrid +regridded +regridder +regridders +regridding +regrids +rel +repo +repos +reprojecting +rh +rhs +rst +rtol +scipy +scitools +seekable +setup +sines +sinh +spec +specs +src +ssh +st +stashcode +stashcodes +stats +std +stdout +str +subcube +subcubes +submodule +submodules +subsetting +sys +tanh +tb +testcases +tgt +th +timepoint +timestamp +timesteps +tol +tos +traceback +travis +tripolar +tuple +tuples +txt +udunits +ufunc +ugrid +ukmo +un +unhandled +unicode +unittest +unrotate +unrotated +uris +url +urls +util +var +versioning +vmax +vmin +waypoint +waypoints +whitespace +wildcard +wildcards +windspeeds +withnans +workflow +workflows +xN +xx +xxx +zeroth +zlev +zonal \ No newline at end of file diff --git a/docs/iris/src/sphinxext/auto_label_figures.py b/docs/iris/src/sphinxext/auto_label_figures.py deleted file mode 100644 index 6fb72826fe..0000000000 --- a/docs/iris/src/sphinxext/auto_label_figures.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -import os -from docutils import nodes - - -def auto_label_figures(app, doctree): - """ - Add a label on every figure. - """ - - for fig in doctree.traverse(condition=nodes.figure): - for img in fig.traverse(condition=nodes.image): - fname, ext = os.path.splitext(img['uri']) - if ext == '.png': - fname = os.path.basename(fname).replace('_', '-') - fig['ids'].append(fname) - - -def setup(app): - app.connect('doctree-read', auto_label_figures) diff --git a/docs/iris/src/sphinxext/custom_class_autodoc.py b/docs/iris/src/sphinxext/custom_class_autodoc.py index a558732bd1..cbde413f2d 100644 --- a/docs/iris/src/sphinxext/custom_class_autodoc.py +++ b/docs/iris/src/sphinxext/custom_class_autodoc.py @@ -8,9 +8,12 @@ from sphinx.ext.autodoc import * from sphinx.util import force_decode from sphinx.util.docstrings import prepare_docstring - import inspect +# stop warnings cluttering the make output +import warnings +warnings.filterwarnings("ignore") + class ClassWithConstructorDocumenter(autodoc.ClassDocumenter): priority = 1000000 @@ -80,4 +83,4 @@ def format_args(self): def setup(app): - app.add_autodocumenter(ClassWithConstructorDocumenter) + app.add_autodocumenter(ClassWithConstructorDocumenter, override=True) diff --git a/docs/iris/src/sphinxext/custom_data_autodoc.py b/docs/iris/src/sphinxext/custom_data_autodoc.py index ade07dbc4e..eecd395101 100644 --- a/docs/iris/src/sphinxext/custom_data_autodoc.py +++ b/docs/iris/src/sphinxext/custom_data_autodoc.py @@ -44,5 +44,5 @@ def handler(app, what, name, obj, options, signature, return_annotation): def setup(app): - app.add_autodocumenter(IrisDataDocumenter) + app.add_autodocumenter(IrisDataDocumenter, override=True) app.connect('autodoc-process-signature', handler) diff --git a/docs/iris/src/sphinxext/gen_example_directory.py b/docs/iris/src/sphinxext/gen_example_directory.py deleted file mode 100644 index c5de195670..0000000000 --- a/docs/iris/src/sphinxext/gen_example_directory.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
- - -''' -Generate the rst files for the examples -''' - -import os -import re -import shutil -import sys - - -def out_of_date(original, derived): - ''' - Returns True if derivative is out-of-date wrt original, - both of which are full file paths. - - TODO: this check isn't adequate in some cases, e.g., if we discover - a bug when building the examples, the original and derived will be - unchanged but we still want to force a rebuild. - ''' - return (not os.path.exists(derived) or - os.stat(derived).st_mtime < os.stat(original).st_mtime) - - -docstring_regex = re.compile(r'[\'\"]{3}(.*?)[\'\"]{3}', re.DOTALL) - - -noplot_regex = re.compile(r'#\s*-\*-\s*noplot\s*-\*-') - - -def generate_example_rst(app): - # Example code can be found at the same level as the documentation - # src folder. - rootdir = os.path.join(os.path.dirname(app.builder.srcdir), 'example_code') - - # Examples are built as a subfolder of the src folder. - exampledir = os.path.join(app.builder.srcdir, 'examples') - - if not os.path.exists(exampledir): - os.makedirs(exampledir) - - datad = {} - for root, subFolders, files in os.walk(rootdir): - for fname in files: - if (fname.startswith('.') or fname.startswith('#') or - fname.startswith('_') or fname.find('.svn') >= 0 or - not fname.endswith('.py')): - continue - - fullpath = os.path.join(root, fname) - with open(fullpath) as fh: - contents = fh.read() - # indent - relpath = os.path.split(root)[-1] - datad.setdefault(relpath, []).append((fullpath, fname, contents)) - - subdirs = sorted(datad.keys()) - - index = [] - index.append('''\ -Iris examples -============= - -.. toctree:: - :maxdepth: 2 - -''') - - for subdir in subdirs: - rstdir = os.path.join(exampledir, subdir) - if not os.path.exists(rstdir): - os.makedirs(rstdir) - - outputdir = os.path.join(app.builder.outdir, 'examples') - if not os.path.exists(outputdir): - os.makedirs(outputdir) - - outputdir = os.path.join(outputdir, subdir) - if not os.path.exists(outputdir): - os.makedirs(outputdir) - - index.append(' {}/index.rst\n'.format(subdir)) - subdir_root_path = os.path.join(rootdir, subdir) - subdirIndex = [] - - # Use the __init__.py file's docstring for the subdir example page (if - # __init__ exists). - if os.path.exists(os.path.join(subdir_root_path, '__init__.py')): - import imp - mod = imp.load_source( - subdir, - os.path.join(subdir_root_path, '__init__.py')) - subdirIndex.append(mod.__doc__) - else: - line = 'Examples in {}\n'.format(subdir) - subdirIndex.extend([line, '=' * len(line)]) - - # Append the code to produce the toctree. - subdirIndex.append(''' -.. toctree:: - :maxdepth: 1 - -''') - - sys.stdout.write(subdir + ', ') - sys.stdout.flush() - - data = sorted(datad[subdir]) - - for fullpath, fname, contents in data: - basename, ext = os.path.splitext(fname) - outputfile = os.path.join(outputdir, fname) - - rstfile = '{}.rst'.format(basename) - outrstfile = os.path.join(rstdir, rstfile) - - subdirIndex.append(' {}\n'.format(rstfile)) - - if not out_of_date(fullpath, outrstfile): - continue - - out = [] - out.append('.. _{}-{}:\n\n'.format(subdir, basename)) - - # Copy the example code to be in the src examples directory. This - # means we can define a simple relative path in the plot directive, - # which can also copy the file into the resulting build directory. 
- shutil.copy(fullpath, rstdir) - - docstring_results = docstring_regex.search(contents) - if docstring_results is not None: - out.append(docstring_results.group(1)) - else: - title = '{} example code: {}'.format(subdir, fname) - out.append(title + '\n') - out.append('=' * len(title) + '\n\n') - - if not noplot_regex.search(contents): - rel_example = os.path.relpath(outputfile, app.builder.outdir) - out.append('\n\n.. plot:: {}\n'.format(rel_example)) - out.append(' :include-source:\n\n') - else: - out.append('[`source code <{}>`_]\n\n'.format(fname)) - out.append('.. literalinclude:: {}\n\n'.format(fname)) - # Write the .py file contents (we didn't need to do this for - # plots as the plot directive does this for us.) - with open(outputfile, 'w') as fhstatic: - fhstatic.write(contents) - - with open(outrstfile, 'w') as fh: - fh.writelines(out) - - subdirIndexFile = os.path.join(rstdir, 'index.rst') - with open(subdirIndexFile, 'w') as fhsubdirIndex: - fhsubdirIndex.writelines(subdirIndex) - - with open(os.path.join(exampledir, 'index.rst'), 'w') as fhindex: - fhindex.writelines(index) - - -def setup(app): - app.connect('builder-inited', generate_example_rst) diff --git a/docs/iris/src/sphinxext/gen_gallery.py b/docs/iris/src/sphinxext/gen_gallery.py deleted file mode 100644 index b4b88ff3bd..0000000000 --- a/docs/iris/src/sphinxext/gen_gallery.py +++ /dev/null @@ -1,201 +0,0 @@ -# -# (C) Copyright 2012 MATPLOTLIB (vn 1.2.0) -# - -''' -Generate a thumbnail gallery of examples. -''' - -import os -import glob -import re -import warnings - -import matplotlib.image as image -from sphinx.util import status_iterator - -from sphinx.util import status_iterator - -template = '''\ -{{% extends "layout.html" %}} -{{% set title = "Thumbnail gallery" %}} - - -{{% block body %}} - -

-<h3>Click on any image to see full size image and source code</h3> -<br/> - -<ul> -{} -</ul> - -{} -{{% endblock %}} -''' - -multiimage = re.compile('(.*?)(_\d\d){1,2}') - - -def make_thumbnail(args): - image.thumbnail(args[0], args[1], 0.4) - - -def out_of_date(original, derived): - return (not os.path.exists(derived) or - os.stat(derived).st_mtime < os.stat(original).st_mtime) - - -def gen_gallery(app, doctree): - if app.builder.name != 'html': - return - - outdir = app.builder.outdir - rootdir = 'examples' - - # Images we want to skip for the gallery because they are an unusual - # size that doesn't layout well in a table, or because they may be - # redundant with other images or uninteresting. - skips = set([ - 'mathtext_examples', - 'matshow_02', - 'matshow_03', - 'matplotlib_icon']) - - thumbnails = {} - rows = [] - random_image = [] - toc_rows = [] - - link_template = ('<a href="{href}"><img src="{thumb_file}" border="0"/>' - '{alternative_text}' - '</a>') - - header_template = ('<div class="section" id="{}">'
- '<h4>{}' - '<a class="headerlink" href="#{}" title="Permalink to this headline">&para;</a></h4>') - - toc_template = ('<li><a href="#{}">' - '{}' - '</a></li>') - - random_image_content_template = ''' -// This file was automatically generated by gen_gallery.py & should not be -// modified directly. - -images = new Array(); - -{} - -''' - - random_image_template = "['{thumbfile}', '{full_image}', '{link}'];" - random_image_join = 'images[{}] = {}' - - dirs = ('General', 'Meteorology', 'Oceanography') - - for subdir in dirs: - rows.append(header_template.format(subdir, subdir, subdir)) - toc_rows.append(toc_template.format(subdir, subdir)) - - origdir = os.path.join(os.path.dirname(outdir), rootdir, subdir) - if not os.path.exists(origdir): - origdir = os.path.join(os.path.dirname(outdir), 'plot_directive', - rootdir, subdir) - thumbdir = os.path.join(outdir, rootdir, subdir, 'thumbnails') - if not os.path.exists(thumbdir): - os.makedirs(thumbdir) - - data = [] - - for filename in sorted(glob.glob(os.path.join(origdir, '*.png'))): - if filename.endswith('hires.png'): - continue - - path, filename = os.path.split(filename) - basename, ext = os.path.splitext(filename) - if basename in skips: - continue - - # Create thumbnails based on images in tmpdir, and place them - # within the build tree. - orig_path = str(os.path.join(origdir, filename)) - thumb_path = str(os.path.join(thumbdir, filename)) - if out_of_date(orig_path, thumb_path) or True: - thumbnails[orig_path] = thumb_path - - m = multiimage.match(basename) - if m is not None: - basename = m.group(1) - - data.append((subdir, basename, - os.path.join(rootdir, subdir, 'thumbnails', - filename))) - - for (subdir, basename, thumbfile) in data: - if thumbfile is not None: - anchor = os.path.basename(thumbfile) - anchor = os.path.splitext(anchor)[0].replace('_', '-') - link = 'examples/{}/{}.html#{}'.format( - subdir, - basename, - anchor) - rows.append(link_template.format( - href=link, - thumb_file=thumbfile, - alternative_text=basename)) - random_image.append(random_image_template.format( - link=link, - thumbfile=thumbfile, - basename=basename, - full_image='_images/' + os.path.basename(thumbfile))) - - if len(data) == 0: - warnings.warn('No thumbnails were found in {}'.format(subdir)) - - # Close out the <div> opened up at the top of this loop. - rows.append('</div>
    ') - - # Generate JS list of images for front page. - random_image_content = '\n'.join([random_image_join.format(i, line) - for i, line in enumerate(random_image)]) - random_image_content = random_image_content_template.format( - random_image_content) - random_image_script_path = os.path.join(app.builder.srcdir, - '_static', - 'random_image.js') - with open(random_image_script_path, 'w') as fh: - fh.write(random_image_content) - - content = template.format('\n'.join(toc_rows), - '\n'.join(rows)) - - # Only write out the file if the contents have actually changed. - # Otherwise, this triggers a full rebuild of the docs. - - gallery_path = os.path.join(app.builder.srcdir, - '_templates', - 'gallery.html') - if os.path.exists(gallery_path): - with open(gallery_path, 'r') as fh: - regenerate = fh.read() != content - else: - regenerate = True - if regenerate: - with open(gallery_path, 'w') as fh: - fh.write(content) - - for key in status_iterator(thumbnails, 'generating thumbnails... ', - length=len(thumbnails)): - image.thumbnail(key, thumbnails[key], 0.3) - - -def setup(app): - app.connect('env-updated', gen_gallery) diff --git a/docs/iris/src/sphinxext/generate_package_rst.py b/docs/iris/src/sphinxext/generate_package_rst.py index 0c6510c170..5ce9f6d014 100644 --- a/docs/iris/src/sphinxext/generate_package_rst.py +++ b/docs/iris/src/sphinxext/generate_package_rst.py @@ -8,11 +8,23 @@ import sys import re import inspect +import ntpath + +# list of tuples for modules to exclude. Useful if the documentation throws +# warnings, especially for experimental modules. +exclude_modules = [ + ("experimental/raster", "iris.experimental.raster") # gdal conflicts +] + + +# print to stdout, including the name of the python file +def autolog(message): + print("[{}] {}".format(ntpath.basename(__file__), message)) document_dict = { # Use autoclass for classes. - 'class': ''' + "class": """ {object_docstring} .. @@ -22,20 +34,21 @@ :undoc-members: :inherited-members: -''', - 'function': ''' +""", + "function": """ .. autofunction:: {object_name} -''', +""", # For everything else, let automodule do some magic... - None: ''' + None: """ .. autodata:: {object_name} -'''} +""", +} -horizontal_sep = ''' +horizontal_sep = """ .. raw:: html

<p class="hr_p"><a href="#">&uarr;&nbsp;top&nbsp;&uarr;</a></p> <!--
    @@ -47,21 +60,22 @@ --> -''' +""" def lookup_object_type(obj): if inspect.isclass(obj): - return 'class' + return "class" elif inspect.isfunction(obj): - return 'function' + return "function" else: return None -def auto_doc_module(file_path, import_name, root_package, - package_toc=None, title=None): - doc = r'''.. _{import_name}: +def auto_doc_module( + file_path, import_name, root_package, package_toc=None, title=None +): + doc = r""".. _{import_name}: {title_underline} {title} @@ -77,54 +91,66 @@ def auto_doc_module(file_path, import_name, root_package, {module_elements} +""" -''' if package_toc: - sidebar = ''' -.. sidebar:: Modules in this package - + sidebar = """ {package_toc_tree} - '''.format(package_toc_tree=package_toc) + """.format( + package_toc_tree=package_toc + ) else: - sidebar = '' + sidebar = "" try: mod = __import__(import_name) except ImportError as e: - message = r'''.. error:: + message = r""".. error:: This module could not be imported. Some dependencies are missing:: - ''' + str(e) - return doc.format(title=title or import_name, - title_underline='=' * len(title or import_name), - import_name=import_name, root_package=root_package, - sidebar=sidebar, module_elements=message) + """ + str( + e + ) + return doc.format( + title=title or import_name, + title_underline="=" * len(title or import_name), + import_name=import_name, + root_package=root_package, + sidebar=sidebar, + module_elements=message, + ) mod = sys.modules[import_name] elems = dir(mod) - if '__all__' in elems: - document_these = [(attr_name, getattr(mod, attr_name)) - for attr_name in mod.__all__] + if "__all__" in elems: + document_these = [ + (attr_name, getattr(mod, attr_name)) for attr_name in mod.__all__ + ] else: - document_these = [(attr_name, getattr(mod, attr_name)) - for attr_name in elems - if (not attr_name.startswith('_') and - not inspect.ismodule(getattr(mod, attr_name)))] + document_these = [ + (attr_name, getattr(mod, attr_name)) + for attr_name in elems + if ( + not attr_name.startswith("_") + and not inspect.ismodule(getattr(mod, attr_name)) + ) + ] def is_from_this_module(arg): - name = arg[0] + # name = arg[0] obj = arg[1] - return (hasattr(obj, '__module__') and - obj.__module__ == mod.__name__) + return ( + hasattr(obj, "__module__") and obj.__module__ == mod.__name__ + ) - sort_order = {'class': 2, 'function': 1} + sort_order = {"class": 2, "function": 1} # Sort them according to sort_order dict. def sort_key(arg): - name = arg[0] + # name = arg[0] obj = arg[1] return sort_order.get(lookup_object_type(obj), 0) @@ -133,63 +159,81 @@ def sort_key(arg): lines = [] for element, obj in document_these: - object_name = import_name + '.' + element + object_name = import_name + "." 
+ element obj_content = document_dict[lookup_object_type(obj)].format( object_name=object_name, - object_name_header_line='+' * len(object_name), - object_docstring=inspect.getdoc(obj)) + object_name_header_line="+" * len(object_name), + object_docstring=inspect.getdoc(obj), + ) lines.append(obj_content) lines = horizontal_sep.join(lines) - module_elements = '\n'.join(' * :py:obj:`{}`'.format(element) - for element, obj in document_these) + module_elements = "\n".join( + " * :py:obj:`{}`".format(element) for element, obj in document_these + ) lines = doc + lines - return lines.format(title=title or import_name, - title_underline='=' * len(title or import_name), - import_name=import_name, root_package=root_package, - sidebar=sidebar, module_elements=module_elements) + return lines.format( + title=title or import_name, + title_underline="=" * len(title or import_name), + import_name=import_name, + root_package=root_package, + sidebar=sidebar, + module_elements=module_elements, + ) def auto_doc_package(file_path, import_name, root_package, sub_packages): - max_depth = 1 if import_name == 'iris' else 2 - package_toc = '\n '.join(sub_packages) - package_toc = ''' + max_depth = 1 if import_name == "iris" else 2 + package_toc = "\n ".join(sub_packages) + + package_toc = """ .. toctree:: :maxdepth: {:d} :titlesonly: + :hidden: {} -'''.format(max_depth, package_toc) +""".format( + max_depth, package_toc + ) - if '.' in import_name: + if "." in import_name: title = None else: - title = import_name.capitalize() + ' reference documentation' + title = import_name.capitalize() + " API" - return auto_doc_module(file_path, import_name, root_package, - package_toc=package_toc, title=title) + return auto_doc_module( + file_path, + import_name, + root_package, + package_toc=package_toc, + title=title, + ) def auto_package_build(app): root_package = app.config.autopackage_name if root_package is None: - raise ValueError('set the autopackage_name variable in the ' - 'conf.py file') + raise ValueError( + "set the autopackage_name variable in the " "conf.py file" + ) if not isinstance(root_package, list): - raise ValueError('autopackage was expecting a list of packages to ' - 'document e.g. ["itertools"]') + raise ValueError( + "autopackage was expecting a list of packages to " + 'document e.g. ["itertools"]' + ) for package in root_package: do_package(package) def do_package(package_name): - out_dir = package_name + os.path.sep + out_dir = "generated/api" + os.path.sep # Import the root package. If this fails then an import error will be # raised. @@ -199,38 +243,45 @@ def do_package(package_name): package_folder = [] module_folders = {} + for root, subFolders, files in os.walk(rootdir): for fname in files: name, ext = os.path.splitext(fname) # Skip some non-relevant files. 
- if (fname.startswith('.') or fname.startswith('#') or - re.search('^_[^_]', fname) or fname.find('.svn') >= 0 or - not (ext in ['.py', '.so'])): + if ( + fname.startswith(".") + or fname.startswith("#") + or re.search("^_[^_]", fname) + or fname.find(".svn") >= 0 + or not (ext in [".py", ".so"]) + ): continue # Handle new shared library naming conventions - if ext == '.so': - name = name.split('.', 1)[0] + if ext == ".so": + name = name.split(".", 1)[0] - rel_path = root_package + \ - os.path.join(root, fname).split(rootdir)[-1] - mod_folder = root_package + \ - os.path.join(root).split(rootdir)[-1].replace('/', '.') + rel_path = ( + root_package + os.path.join(root, fname).split(rootdir)[-1] + ) + mod_folder = root_package + os.path.join(root).split(rootdir)[ + -1 + ].replace("/", ".") # Only add this package to folder list if it contains an __init__ # script. - if name == '__init__': + if name == "__init__": package_folder.append([mod_folder, rel_path]) else: - import_name = mod_folder + '.' + name + import_name = mod_folder + "." + name mf_list = module_folders.setdefault(mod_folder, []) mf_list.append((import_name, rel_path)) if not os.path.exists(out_dir): os.makedirs(out_dir) for package, package_path in package_folder: - if '._' in package or 'test' in package: + if "._" in package or "test" in package: continue paths = [] @@ -242,60 +293,83 @@ def do_package(package_name): continue if not spackage.startswith(package): continue - if spackage.count('.') > package.count('.') + 1: + if spackage.count(".") > package.count(".") + 1: continue - if 'test' in spackage: + if "test" in spackage: continue - split_path = spackage.rsplit('.', 2)[-2:] - if any(part[0] == '_' for part in split_path): + split_path = spackage.rsplit(".", 2)[-2:] + if any(part[0] == "_" for part in split_path): continue - paths.append(os.path.join(*split_path) + '.rst') + paths.append(os.path.join(*split_path) + ".rst") - paths.extend(os.path.join(os.path.basename(os.path.dirname(path)), - os.path.basename(path).split('.', 1)[0]) - for imp_name, path in module_folders.get(package, [])) + paths.extend( + os.path.join( + os.path.basename(os.path.dirname(path)), + os.path.basename(path).split(".", 1)[0], + ) + for imp_name, path in module_folders.get(package, []) + ) paths.sort() + + # check for any modules to exclude + for exclude_module in exclude_modules: + if exclude_module[0] in paths: + autolog( + "Excluding module in package: {}".format(exclude_module[0]) + ) + paths.remove(exclude_module[0]) + doc = auto_doc_package(package_path, package, root_package, paths) - package_dir = out_dir + package.replace('.', os.path.sep) + package_dir = out_dir + package.replace(".", os.path.sep) if not os.path.exists(package_dir): - os.makedirs(out_dir + package.replace('.', os.path.sep)) + os.makedirs(out_dir + package.replace(".", os.path.sep)) - out_path = package_dir + '.rst' + out_path = package_dir + ".rst" if not os.path.exists(out_path): - print('Creating non-existent document {} ...'.format(out_path)) - with open(out_path, 'w') as fh: + autolog("Creating {} ...".format(out_path)) + with open(out_path, "w") as fh: fh.write(doc) else: - with open(out_path, 'r') as fh: - existing_content = ''.join(fh.readlines()) + with open(out_path, "r") as fh: + existing_content = "".join(fh.readlines()) if doc != existing_content: - print('Creating out of date document {} ...'.format( - out_path)) - with open(out_path, 'w') as fh: + autolog("Creating {} ...".format(out_path)) + with open(out_path, "w") as fh: fh.write(doc) for 
import_name, module_path in module_folders.get(package, []): - doc = auto_doc_module(module_path, import_name, root_package) - out_path = out_dir + import_name.replace('.', os.path.sep) + '.rst' - if not os.path.exists(out_path): - print('Creating non-existent document {} ...'.format( - out_path)) - with open(out_path, 'w') as fh: - fh.write(doc) - else: - with open(out_path, 'r') as fh: - existing_content = ''.join(fh.readlines()) - if doc != existing_content: - print('Creating out of date document {} ...'.format( - out_path)) - with open(out_path, 'w') as fh: - fh.write(doc) + # check for any modules to exclude + for exclude_module in exclude_modules: + if import_name == exclude_module[1]: + autolog( + "Excluding module file: {}".format(exclude_module[1]) + ) + else: + doc = auto_doc_module( + module_path, import_name, root_package + ) + out_path = ( + out_dir + + import_name.replace(".", os.path.sep) + + ".rst" + ) + if not os.path.exists(out_path): + autolog("Creating {} ...".format(out_path)) + with open(out_path, "w") as fh: + fh.write(doc) + else: + with open(out_path, "r") as fh: + existing_content = "".join(fh.readlines()) + if doc != existing_content: + autolog("Creating {} ...".format(out_path)) + with open(out_path, "w") as fh: + fh.write(doc) def setup(app): - app.connect('builder-inited', auto_package_build) - app.add_config_value('autopackage_name', None, 'env') + app.connect("builder-inited", auto_package_build) + app.add_config_value("autopackage_name", None, "env") diff --git a/docs/iris/src/whitepapers/change_management.rst b/docs/iris/src/techpapers/change_management.rst similarity index 95% rename from docs/iris/src/whitepapers/change_management.rst rename to docs/iris/src/techpapers/change_management.rst index b279c91b96..d09237a4bf 100644 --- a/docs/iris/src/whitepapers/change_management.rst +++ b/docs/iris/src/techpapers/change_management.rst @@ -1,3 +1,5 @@ +:orphan: + .. _change_management: Change Management in Iris from the User's perspective @@ -44,25 +46,28 @@ User Actions : How you should respond to changes and releases Checklist : * when a new **testing or candidate version** is announced - if convenient, test your working legacy code against it and report any problems. + + * if convenient, test your working legacy code against it and report any problems. * when a new **minor version is released** - * review the 'Whats New' documentation to see if it introduces any - deprecations that may affect you. - * run your working legacy code and check for any deprecation warnings, - indicating that modifications may be necessary at some point - * when convenient : + * review the 'What's New' documentation to see if it introduces any + deprecations that may affect you. + * run your working legacy code and check for any deprecation warnings, + indicating that modifications may be necessary at some point + * when convenient : * review existing code for use of deprecated features * rewrite code to replace deprecated features * when a new major version is **announced** - ensure your code runs, without producing deprecation warnings, in the + + * ensure your code runs, without producing deprecation warnings, in the previous minor release * when a new major version is **released** - check for new deprecation warnings, as for a minor release + + * check for new deprecation warnings, as for a minor release Details @@ -81,6 +86,7 @@ Our practices are intended be compatible with the principles defined in the `SemVer project `_ . 
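The checklist advice above, to run legacy code and check for deprecation warnings, can be exercised directly. Below is a minimal sketch (it assumes the Iris sample data is installed, and the choice of ``air_temp.pp`` as the file to exercise is only illustrative) that promotes any ``DeprecationWarning`` to a hard error while loading::

    import warnings

    import iris

    # Fail loudly on any deprecated usage while exercising legacy code.
    with warnings.catch_warnings():
        warnings.simplefilter("error", DeprecationWarning)
        cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))
        print(cube.summary(shorten=True))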
Key concepts covered here: + * :ref:`Release versions ` * :ref:`Backwards compatibility ` * :ref:`Deprecation ` @@ -95,18 +101,18 @@ Backwards compatibility usages unchanged (see :ref:`terminology ` below). Minor releases may only include backwards-compatible changes. -The following are examples of backward-compatible changes : +The following are examples of backward-compatible changes: * changes to documentation * adding to a module : new submodules, functions, classes or properties * adding to a class : new methods or properties * adding to a function or method : new **optional** arguments or keywords -The following are examples of **non-** backward-compatible changes : +The following are examples of **non-** backward-compatible changes: * removing (which includes *renaming*) any public module or submodule * removing any public component : a module, class, method, function or - data object property of a public API component + data object property of a public API component * removing any property of a public object * removing an argument or keyword from a method or function * adding a required argument to a method or function @@ -221,7 +227,7 @@ are : * A non-zero "" denotes a bugfix version, thus a release "X.Y.0" may be followed by "X.Y.1", "X.Y.2" etc, which *only* differ by containing - bugfixes. Any bugfix release supercedes its predecessors, and does not + bugfixes. Any bugfix release supersedes its predecessors, and does not change any (valid) APIs or behaviour : hence, it is always advised to replace a given version with its latest bugfix successor, and there should be no reason not to. @@ -255,7 +261,7 @@ behaviour of existing code can only be made at a **major** release, i.e. when "X.0" is released following the last previous "(X-1).Y.Z". *Minor* releases, by contrast, consist of bugfixes, new features, and -deprecations : Any valid exisiting code should be unaffected by these, so it +deprecations : Any valid existing code should be unaffected by these, so it will still run with the same results. At a major release, only **deprecated** behaviours and APIs can be changed or @@ -355,7 +361,7 @@ with the new release, which we obviously need to avoid. * the user code usage is simply by calls to "iris.load" * the change is not a bugfix, as the old way isn't actually "wrong" * we don't want to add an extra keyword into all the relevant calls - * we don't see a longterm future for the existing behaviour : we + * we don't see a long term future for the existing behaviour : we expect everyone to adopt the new interpretation, eventually For changes of this sort, the release will define a new boolean property of the @@ -421,7 +427,7 @@ At (major) release ".0...": * If your code is explicitly turning the option off, it will continue to work in the same way at this point, but obviously time is - runnning out. + running out. * If your code is still using the old behaviour and *not* setting the control option at all, its behaviour might now have changed diff --git a/docs/iris/src/whitepapers/index.rst b/docs/iris/src/techpapers/index.rst similarity index 54% rename from docs/iris/src/whitepapers/index.rst rename to docs/iris/src/techpapers/index.rst index dd0876d257..773c8f7059 100644 --- a/docs/iris/src/whitepapers/index.rst +++ b/docs/iris/src/techpapers/index.rst @@ -1,8 +1,9 @@ -.. _whitepapers_index: +.. 
_techpapers_index: + + +Iris Technical Papers +===================== -============================ -Iris technical 'Whitepapers' -============================ Extra information on specific technical issues. .. toctree:: diff --git a/docs/iris/src/whitepapers/missing_data_handling.rst b/docs/iris/src/techpapers/missing_data_handling.rst similarity index 100% rename from docs/iris/src/whitepapers/missing_data_handling.rst rename to docs/iris/src/techpapers/missing_data_handling.rst diff --git a/docs/iris/src/whitepapers/um_files_loading.rst b/docs/iris/src/techpapers/um_files_loading.rst similarity index 99% rename from docs/iris/src/whitepapers/um_files_loading.rst rename to docs/iris/src/techpapers/um_files_loading.rst index fd2d2a2341..d8c796b31f 100644 --- a/docs/iris/src/whitepapers/um_files_loading.rst +++ b/docs/iris/src/techpapers/um_files_loading.rst @@ -30,7 +30,7 @@ Notes: #. Iris treats Fieldsfile data almost exactly as if it were PP -- i.e. it treats each field's lookup table entry like a PP header. -#. The Iris datamodel is based on +#. The Iris data model is based on `NetCDF CF conventions `_, so most of this can also be seen as a metadata translation between PP and CF terms, but it is easier to discuss in terms of Iris elements. @@ -132,7 +132,7 @@ For an ordinary latitude-longitude grid, the cubes have coordinates called ``ZDX/Y + BDX/Y * (1 .. LBNPT/LBROW)`` (*except*, if BDX/BDY is zero, the values are taken from the extra data vector X/Y, if present). * If X/Y_LOWER_BOUNDS extra data is available, this appears as bounds values - of the horizontal cooordinates. + of the horizontal coordinates. For **rotated** latitude-longitude coordinates (as for LBCODE=101), the horizontal coordinates differ only slightly -- diff --git a/docs/iris/src/userguide/citation.rst b/docs/iris/src/userguide/citation.rst index 01b655574e..7ce0a8ffc0 100644 --- a/docs/iris/src/userguide/citation.rst +++ b/docs/iris/src/userguide/citation.rst @@ -23,7 +23,7 @@ For example:: ******************* -Downloaded Software +Downloaded software ******************* Suggested format:: @@ -36,7 +36,7 @@ For example:: ******************** -Checked out Software +Checked out software ******************** Suggested format:: diff --git a/docs/iris/src/userguide/code_maintenance.rst b/docs/iris/src/userguide/code_maintenance.rst index 00ba30506c..d03808e18f 100644 --- a/docs/iris/src/userguide/code_maintenance.rst +++ b/docs/iris/src/userguide/code_maintenance.rst @@ -1,31 +1,31 @@ -Code Maintenance +Code maintenance ================ From a user point of view "code maintenance" means ensuring that your existing working code stays working, in the face of changes to Iris. -Stability and Change +Stability and change --------------------- In practice, as Iris develops, most users will want to periodically upgrade -their installed version to access new features or at least bugfixes. +their installed version to access new features or at least bug fixes. This is obvious if you are still developing other code that uses Iris, or using code from other sources. However, even if you have only legacy code that remains untouched, some code -maintenance effort is probably still necessary : +maintenance effort is probably still necessary: * On the one hand, *in principle*, working code will go on working, as long as you don't change anything else. 
- * However, such "version statis" can easily become a growing burden, if you - are simply waiting until an update becomes unavoidable : Often, that will + * However, such "version stasis" can easily become a growing burden, if you + are simply waiting until an update becomes unavoidable; often that will eventually occur when you need to update some other software component, for some completely unconnected reason. -Principles of Change Management +Principles of change management ------------------------------- When you upgrade software to a new version, you often find that you need to diff --git a/docs/iris/src/userguide/cube_maths.rst b/docs/iris/src/userguide/cube_maths.rst index 8fe6eb12d5..0ac2b8da74 100644 --- a/docs/iris/src/userguide/cube_maths.rst +++ b/docs/iris/src/userguide/cube_maths.rst @@ -60,6 +60,10 @@ but with the data representing their difference: Scalar coordinates: forecast_reference_time: 1859-09-01 06:00:00 height: 1.5 m + Attributes: + Conventions: CF-1.5 + Model scenario: E1 + source: Data from Met Office Unified Model 6.05 .. note:: @@ -208,7 +212,7 @@ The result could now be plotted using the guidance provided in the .. only:: html A very similar example to this can be found in - :doc:`/examples/Meteorology/deriving_phenomena`. + :ref:`sphx_glr_generated_gallery_meteorology_plot_deriving_phenomena.py`. .. only:: latex diff --git a/docs/iris/src/userguide/cube_statistics.rst b/docs/iris/src/userguide/cube_statistics.rst index 3ca7d9a2e0..310551c76f 100644 --- a/docs/iris/src/userguide/cube_statistics.rst +++ b/docs/iris/src/userguide/cube_statistics.rst @@ -93,7 +93,8 @@ can be used instead of ``MEAN``, see :mod:`iris.analysis` for a full list of currently supported operators. For an example of using this functionality, the -:ref:`Hovmoller diagram ` example found +:ref:`sphx_glr_generated_gallery_meteorology_plot_hovmoller.py` +example found in the gallery takes a zonal mean of an ``XYT`` cube by using the ``collapsed`` method with ``latitude`` and ``iris.analysis.MEAN`` as arguments. @@ -147,7 +148,7 @@ These areas can now be passed to the ``collapsed`` method as weights: Several examples of area averaging exist in the gallery which may be of interest, including an example on taking a :ref:`global area-weighted mean -`. +`. .. _cube-statistics-aggregated-by: @@ -244,7 +245,7 @@ These two coordinates can now be used to aggregate by season and climate-year: The primary change in the cube is that the cube's data has been reduced in the 'time' dimension by aggregation (taking means, in this case). -This has collected together all datapoints with the same values of season and +This has collected together all data points with the same values of season and season-year. The results are now indexed by the 19 different possible values of season and season-year in a new, reduced 'time' dimension. diff --git a/docs/iris/src/userguide/end_of_userguide.rst b/docs/iris/src/userguide/end_of_userguide.rst deleted file mode 100644 index c8f951a634..0000000000 --- a/docs/iris/src/userguide/end_of_userguide.rst +++ /dev/null @@ -1,15 +0,0 @@ -End of the user guide -===================== - -If this was your first time reading the user guide, we hope you found it enjoyable and informative. -It is advised that you now go back to the :doc:`start ` and try experimenting with your own data. 
- - - -Iris gallery ------------- -It can be very daunting to start coding a project from an empty file, that is why you will find many in-depth -examples in the Iris gallery which can be used as a goal driven reference to producing your own visualisations. - -If you produce a visualisation which you think would add value to the gallery, please get in touch with us and -we will consider including it as an example for all to benefit from. diff --git a/docs/iris/src/userguide/index.rst b/docs/iris/src/userguide/index.rst index 4fb7b62155..2a3b32fe11 100644 --- a/docs/iris/src/userguide/index.rst +++ b/docs/iris/src/userguide/index.rst @@ -1,11 +1,9 @@ .. _user_guide_index: +.. _user_guide_introduction: -=============== -Iris user guide -=============== +Introduction +============ -How to use the user guide ---------------------------- If you are reading this user guide for the first time it is strongly recommended that you read the user guide fully before experimenting with your own data files. @@ -18,24 +16,16 @@ links in order to understand the guide but they may serve as a useful reference Since later pages depend on earlier ones, try reading this user guide sequentially using the ``next`` and ``previous`` links. -User guide table of contents -------------------------------- - -.. toctree:: - :maxdepth: 2 - :numbered: - - iris_cubes.rst - loading_iris_cubes.rst - saving_iris_cubes.rst - navigating_a_cube.rst - subsetting_a_cube.rst - real_and_lazy_data.rst - plotting_a_cube.rst - interpolation_and_regridding.rst - merge_and_concat.rst - cube_statistics.rst - cube_maths.rst - citation.rst - code_maintenance.rst - end_of_userguide.rst +* :doc:`iris_cubes` +* :doc:`loading_iris_cubes` +* :doc:`saving_iris_cubes` +* :doc:`navigating_a_cube` +* :doc:`subsetting_a_cube` +* :doc:`real_and_lazy_data` +* :doc:`plotting_a_cube` +* :doc:`interpolation_and_regridding` +* :doc:`merge_and_concat` +* :doc:`cube_statistics` +* :doc:`cube_maths` +* :doc:`citation` +* :doc:`code_maintenance` diff --git a/docs/iris/src/userguide/interpolation_and_regridding.rst b/docs/iris/src/userguide/interpolation_and_regridding.rst index 565f9b61eb..65ac36eada 100644 --- a/docs/iris/src/userguide/interpolation_and_regridding.rst +++ b/docs/iris/src/userguide/interpolation_and_regridding.rst @@ -1,6 +1,5 @@ .. _interpolation_and_regridding: - .. testsetup:: * import numpy as np @@ -16,7 +15,7 @@ Iris provides powerful cube-aware interpolation and regridding functionality, exposed through Iris cube methods. This functionality is provided by building upon existing interpolation schemes implemented by SciPy. -In Iris we refer to the avaliable types of interpolation and regridding as +In Iris we refer to the available types of interpolation and regridding as `schemes`. The following are the interpolation schemes that are currently available in Iris: diff --git a/docs/iris/src/userguide/iris_cubes.rst b/docs/iris/src/userguide/iris_cubes.rst index dc423afba1..5929c402f2 100644 --- a/docs/iris/src/userguide/iris_cubes.rst +++ b/docs/iris/src/userguide/iris_cubes.rst @@ -1,13 +1,9 @@ -.. _user_guide_introduction: - -=================== -Introduction -=================== - .. _iris_data_structures: +==================== Iris data structures --------------------- +==================== + The top level object in Iris is called a cube. A cube contains data and metadata about a phenomenon. 
In Iris, a cube is an interpretation of the *Climate and Forecast (CF) Metadata Conventions* whose purpose is to: @@ -33,6 +29,7 @@ by definition, its phenomenon. * Each coordinate has a name and a unit. * When a coordinate is added to a cube, the data dimensions that it represents are also provided. + * The shape of a coordinate is always the same as the shape of the associated data dimension(s) on the cube. * A dimension not explicitly listed signifies that the coordinate is independent of that dimension. * Each dimension of a coordinate must be mapped to a data dimension. The only coordinates with no mapping are diff --git a/docs/iris/src/userguide/loading_iris_cubes.rst b/docs/iris/src/userguide/loading_iris_cubes.rst index bf50acc614..006a919408 100644 --- a/docs/iris/src/userguide/loading_iris_cubes.rst +++ b/docs/iris/src/userguide/loading_iris_cubes.rst @@ -38,10 +38,12 @@ This shows that there were 2 cubes as a result of loading the file, they were: ``air_potential_temperature`` and ``surface_altitude``. The ``surface_altitude`` cube was 2 dimensional with: + * the two dimensions have extents of 204 and 187 respectively and are represented by the ``grid_latitude`` and ``grid_longitude`` coordinates. The ``air_potential_temperature`` cubes were 4 dimensional with: + * the same length ``grid_latitude`` and ``grid_longitude`` dimensions as ``surface_altitide`` * a ``time`` dimension of length 3 @@ -267,7 +269,7 @@ boundary of a circular coordinate (this is often the meridian or the dateline / antimeridian). An example use-case of this is to extract the entire Pacific Ocean from a cube whose longitudes are bounded by the dateline. -This functionality cannot be provided reliably using contraints. Instead you should use the +This functionality cannot be provided reliably using constraints. Instead you should use the functionality provided by :meth:`cube.intersection ` to extract this region. diff --git a/docs/iris/src/userguide/merge_and_concat.rst b/docs/iris/src/userguide/merge_and_concat.rst index b742b3ef5f..0d844ac403 100644 --- a/docs/iris/src/userguide/merge_and_concat.rst +++ b/docs/iris/src/userguide/merge_and_concat.rst @@ -1,7 +1,7 @@ .. _merge_and_concat: ===================== -Merge and Concatenate +Merge and concatenate ===================== We saw in the :doc:`loading_iris_cubes` chapter that Iris tries to load as few cubes as diff --git a/docs/iris/src/userguide/navigating_a_cube.rst b/docs/iris/src/userguide/navigating_a_cube.rst index 055617e047..581d1a67cf 100644 --- a/docs/iris/src/userguide/navigating_a_cube.rst +++ b/docs/iris/src/userguide/navigating_a_cube.rst @@ -229,7 +229,7 @@ by field basis *before* they are automatically merged together: # Add our own realization coordinate if it doesn't already exist. if not cube.coords('realization'): realization = np.int32(filename[-6:-3]) - ensemble_coord = icoords.AuxCoord(realization, standard_name='realization') + ensemble_coord = icoords.AuxCoord(realization, standard_name='realization', units="1") cube.add_aux_coord(ensemble_coord) filename = iris.sample_data_path('GloSea4', '*.pp') diff --git a/docs/iris/src/userguide/plotting_a_cube.rst b/docs/iris/src/userguide/plotting_a_cube.rst index d82cbbb027..f646aa4b3e 100644 --- a/docs/iris/src/userguide/plotting_a_cube.rst +++ b/docs/iris/src/userguide/plotting_a_cube.rst @@ -190,7 +190,7 @@ and providing the label keyword to identify it. 
Once all of the lines have been added the :func:`matplotlib.pyplot.legend` function can be called to indicate that a legend is desired: -.. plot:: ../example_code/General/lineplot_with_legend.py +.. plot:: ../gallery_code/general/plot_lineplot_with_legend.py :include-source: This example of consecutive ``qplt.plot`` calls coupled with the @@ -272,7 +272,7 @@ Brewer colour palettes *********************** Iris includes colour specifications and designs developed by -`Cynthia Brewer `_. +`Cynthia Brewer `_ These colour schemes are freely available under the following licence:: Apache-Style Software License for ColorBrewer software and ColorBrewer Color Schemes @@ -298,7 +298,7 @@ For adding citations to Iris plots, see :ref:`brewer-cite` (below). Available Brewer Schemes ======================== The following subset of Brewer palettes found at -`colorbrewer.org `_ are available within Iris. +`colorbrewer2.org `_ are available within Iris. .. plot:: userguide/plotting_examples/brewer.py diff --git a/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py b/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py index 30a5fc4318..f3772328ab 100644 --- a/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py +++ b/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py @@ -11,4 +11,5 @@ temperature_1d = temperature[5, :] qplt.plot(temperature_1d) + plt.show() diff --git a/docs/iris/src/userguide/plotting_examples/1d_simple.py b/docs/iris/src/userguide/plotting_examples/1d_simple.py index b76752ac18..ea90faf402 100644 --- a/docs/iris/src/userguide/plotting_examples/1d_simple.py +++ b/docs/iris/src/userguide/plotting_examples/1d_simple.py @@ -11,4 +11,5 @@ temperature_1d = temperature[5, :] iplt.plot(temperature_1d) + plt.show() diff --git a/docs/iris/src/userguide/plotting_examples/1d_with_legend.py b/docs/iris/src/userguide/plotting_examples/1d_with_legend.py index 1ee75e1ed9..26aeeef9a6 100644 --- a/docs/iris/src/userguide/plotting_examples/1d_with_legend.py +++ b/docs/iris/src/userguide/plotting_examples/1d_with_legend.py @@ -1,5 +1,4 @@ import matplotlib.pyplot as plt - import iris import iris.plot as iplt diff --git a/docs/iris/src/userguide/plotting_examples/brewer.py b/docs/iris/src/userguide/plotting_examples/brewer.py index e4533a28f5..f2ede9f9bc 100644 --- a/docs/iris/src/userguide/plotting_examples/brewer.py +++ b/docs/iris/src/userguide/plotting_examples/brewer.py @@ -4,19 +4,26 @@ import iris.palette -a = np.linspace(0, 1, 256).reshape(1, -1) -a = np.vstack((a, a)) - -maps = sorted(iris.palette.CMAP_BREWER) -nmaps = len(maps) - -fig = plt.figure(figsize=(7, 10)) -fig.subplots_adjust(top=0.99, bottom=0.01, left=0.2, right=0.99) -for i, m in enumerate(maps): - ax = plt.subplot(nmaps, 1, i + 1) - plt.axis("off") - plt.imshow(a, aspect="auto", cmap=plt.get_cmap(m), origin="lower") - pos = list(ax.get_position().bounds) - fig.text(pos[0] - 0.01, pos[1], m, fontsize=8, horizontalalignment="right") - -plt.show() +def main(): + a = np.linspace(0, 1, 256).reshape(1, -1) + a = np.vstack((a, a)) + + maps = sorted(iris.palette.CMAP_BREWER) + nmaps = len(maps) + + fig = plt.figure(figsize=(7, 10)) + fig.subplots_adjust(top=0.99, bottom=0.01, left=0.2, right=0.99) + for i, m in enumerate(maps): + ax = plt.subplot(nmaps, 1, i + 1) + plt.axis("off") + plt.imshow(a, aspect="auto", cmap=plt.get_cmap(m), origin="lower") + pos = list(ax.get_position().bounds) + fig.text( + pos[0] - 0.01, pos[1], m, fontsize=8, horizontalalignment="right" + ) + + plt.show() + + +if 
__name__ == "__main__": + main() diff --git a/docs/iris/src/userguide/plotting_examples/cube_blockplot.py b/docs/iris/src/userguide/plotting_examples/cube_blockplot.py index cd380f5e35..0961a97fdb 100644 --- a/docs/iris/src/userguide/plotting_examples/cube_blockplot.py +++ b/docs/iris/src/userguide/plotting_examples/cube_blockplot.py @@ -1,5 +1,4 @@ import matplotlib.pyplot as plt - import iris import iris.quickplot as qplt diff --git a/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py index 6dce2b39de..45ba800485 100644 --- a/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py +++ b/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py @@ -1,5 +1,4 @@ import matplotlib.pyplot as plt - import iris import iris.quickplot as qplt import iris.plot as iplt diff --git a/docs/iris/src/userguide/saving_iris_cubes.rst b/docs/iris/src/userguide/saving_iris_cubes.rst index ecf2210810..fa67b6213d 100644 --- a/docs/iris/src/userguide/saving_iris_cubes.rst +++ b/docs/iris/src/userguide/saving_iris_cubes.rst @@ -6,8 +6,8 @@ Saving Iris cubes Iris supports the saving of cubes and cube lists to: -* CF netCDF (1.5) -* GRIB (edition 2) +* CF netCDF (version 1.6) +* GRIB edition 2 (if `iris-grib `_ is installed) * Met Office PP @@ -57,7 +57,6 @@ The :py:func:`iris.save` function passes all other keywords through to the saver See * :py:func:`iris.fileformats.netcdf.save` -* :py:func:`iris.fileformats.grib.save_grib2` * :py:func:`iris.fileformats.pp.save` for more details on supported arguments for the individual savers. @@ -70,14 +69,14 @@ When saving to GRIB or PP, the save process may be intercepted between the trans For example, a GRIB2 message with a particular known long_name may need to be saved to a specific parameter code and type of statistical process. This can be achieved by:: def tweaked_messages(cube): - for cube, grib_message in iris.fileformats.grib.as_pairs(cube): + for cube, grib_message in iris_grib.save_pairs_from_cube(cube): # post process the GRIB2 message, prior to saving if cube.name() == 'carefully_customised_precipitation_amount': gribapi.grib_set_long(grib_message, "typeOfStatisticalProcess", 1) gribapi.grib_set_long(grib_message, "parameterCategory", 1) gribapi.grib_set_long(grib_message, "parameterNumber", 1) yield grib_message - iris.fileformats.grib.save_messages(tweaked_messages(cubes[0]), '/tmp/agrib2.grib2') + iris_grib.save_messages(tweaked_messages(cubes[0]), '/tmp/agrib2.grib2') Similarly a PP field may need to be written out with a specific value for LBEXP. This can be achieved by:: @@ -98,7 +97,7 @@ netCDF NetCDF is a flexible container for metadata and cube metadata is closely related to the CF for netCDF semantics. This means that cube metadata is well represented in netCDF files, closely resembling the in memory metadata representation. Thus there is no provision for similar save customisation functionality for netCDF saving, all customisations should be applied to the cube prior to saving to netCDF. -Bespoke Saver +Bespoke saver -------------- A bespoke saver may be written to support an alternative file format. This can be provided to the :py:func:`iris.save` function, enabling Iris to write to a different file format. 
diff --git a/docs/iris/src/whatsnew/1.0.rst b/docs/iris/src/whatsnew/1.0.rst index 2a415c1bfe..79afd8cf1a 100644 --- a/docs/iris/src/whatsnew/1.0.rst +++ b/docs/iris/src/whatsnew/1.0.rst @@ -1,12 +1,15 @@ -What's new in Iris 1.0 -********************** +v1.0 (17 Oct 2012) +****************** -:Release: 1.0.0 -:Date: 15 Oct, 2012 - -This document explains the new/changed features of Iris in version 1.0. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) + +.. contents:: Skip to section: + :local: + :depth: 3 + + With the release of Iris 1.0, we have broadly completed the transition to the CF data model, and established a stable foundation for future work. Following this release we plan to deliver significant performance @@ -28,48 +31,44 @@ to formalise their data model reach maturity, they will be included in Iris where significant backwards-compatibility can be maintained. -Iris 1.0 features -================= +Features +======== A summary of the main features added with version 1.0: * Hybrid-pressure vertical coordinates, and the ability to load from GRIB. + * Initial support for CF-style coordinate systems. + * Use of Cartopy for mapping in matplotlib. + * Load data from NIMROD files. + * Availability of Cynthia Brewer colour palettes. + * Add a citation to a plot. + * Ensures netCDF files are properly closed. + * The ability to bypass merging when loading data. + * Save netCDF files with an unlimited dimension. + * A more explicit set of load functions, which also allow the automatic cube merging to be bypassed as a last resort. + * The ability to project a cube with a lat-lon or rotated lat-lon coordinate system into a range of map projections e.g. Polar Stereographic. - -Incompatible changes --------------------- -* The "source" and "history" metadata are now represented as Cube - attributes, where previously they used coordinates. -* :meth:`iris.cube.Cube.coord_dims()` now returns a tuple instead of a list. -* The ``iris.plot.gcm`` and ``iris.plot.map_setup`` functions are now removed. - See :ref:`whats-new-cartopy` for further details. - -Deprecations ------------- -* The methods :meth:`iris.coords.Coord.cos()` and - :meth:`iris.coords.Coord.sin()` have been deprecated. -* The :func:`iris.load_strict()` function has been deprecated. Code - should now use the :func:`iris.load_cube()` and - :func:`iris.load_cubes()` functions instead. +* Cube summaries are now more readable when the scalar coordinates + contain bounds. CF-netCDF coordinate systems -============================ +---------------------------- The coordinate systems in Iris are now defined by the CF-netCDF -`grid mappings `_. +`grid mappings `_. As of Iris 1.0 a subset of the CF-netCDF coordinate systems are supported, but this will be expanded in subsequent versions. Adding this code is a relatively simple, incremental process - it would make a @@ -79,13 +78,13 @@ contributing to the project. 
The coordinate systems available in Iris 1.0 and their corresponding Iris classes are: -================================================================================================== ========================================= -CF name Iris class -================================================================================================== ========================================= -`Latitude-longitude `_ :class:`~iris.coord_systems.GeogCS` -`Rotated pole `_ :class:`~iris.coord_systems.RotatedGeogCS` -`Transverse Mercator `_ :class:`~iris.coord_systems.TransverseMercator` -================================================================================================== ========================================= +================================================================================================================= ========================================= +CF name Iris class +================================================================================================================= ========================================= +`Latitude-longitude `_ :class:`~iris.coord_systems.GeogCS` +`Rotated pole `_ :class:`~iris.coord_systems.RotatedGeogCS` +`Transverse Mercator `_ :class:`~iris.coord_systems.TransverseMercator` +================================================================================================================= ========================================= For convenience, Iris also includes the :class:`~iris.coord_systems.OSGB` class which provides a simple way to create the transverse Mercator @@ -96,7 +95,7 @@ coordinate system used by the British .. _whats-new-cartopy: Using Cartopy for mapping in matplotlib -======================================= +--------------------------------------- The underlying map drawing package has now been updated to use `Cartopy `_. Cartopy provides a @@ -143,12 +142,11 @@ For more examples of what can be done with Cartopy, see the Iris gallery and Hybrid-pressure -=============== +--------------- With the introduction of the :class:`~iris.aux_factory.HybridPressureFactory` class, it is now possible to represent data expressed on a -hybrid-pressure vertical coordinate, as defined by the second variant in -`Appendix D `_. +`hybrid-pressure vertical coordinate `_. A hybrid-pressure factory is created with references to the coordinates which provide the components of the hybrid coordinate ("ap" and "b") and the surface pressure. In return, it provides a virtual "pressure" @@ -160,11 +158,11 @@ the derived "pressure" coordinate for certain data [#f1]_ from the .. [#f1] Where the level type is either 105 or 119, and where the surface pressure has an ECMWF paramId of - `152 `_. + `152 `_. NetCDF -====== +------ When saving a Cube to a netCDF file, Iris will now define the outermost dimension as an unlimited/record dimension. In combination with the @@ -190,7 +188,7 @@ processes. Brewer colour palettes -====================== +---------------------- Iris includes a selection of carefully designed colour palettes produced by Cynthia Brewer. The :mod:`iris.palette` module registers the Brewer @@ -216,7 +214,7 @@ in the citation guidance provided by Cynthia Brewer. Metadata attributes -=================== +------------------- Iris now stores "source" and "history" metadata in Cube attributes. 
For example:: @@ -250,7 +248,7 @@ Where previously it would have appeared as:: New loading functions -===================== +--------------------- The main functions for loading cubes are now: - :func:`iris.load()` @@ -273,7 +271,7 @@ functions instead. Cube projection -=============== +--------------- Iris now has the ability to project a cube into a number of map projections. This functionality is provided by :func:`iris.analysis.cartography.project()`. @@ -310,7 +308,24 @@ preserved. This function currently assumes global data and will if necessary extrapolate beyond the geographical extent of the source cube. -Other changes -============= -* Cube summaries are now more readable when the scalar coordinates - contain bounds. +Incompatible changes +==================== + +* The "source" and "history" metadata are now represented as Cube + attributes, where previously they used coordinates. + +* :meth:`iris.cube.Cube.coord_dims()` now returns a tuple instead of a list. + +* The ``iris.plot.gcm`` and ``iris.plot.map_setup`` functions are now removed. + See :ref:`whats-new-cartopy` for further details. + + +Deprecations +============ + +* The methods :meth:`iris.coords.Coord.cos()` and + :meth:`iris.coords.Coord.sin()` have been deprecated. + +* The :func:`iris.load_strict()` function has been deprecated. Code + should now use the :func:`iris.load_cube()` and + :func:`iris.load_cubes()` functions instead. diff --git a/docs/iris/src/whatsnew/1.1.rst b/docs/iris/src/whatsnew/1.1.rst index 274ec65ff6..ea85dbc42c 100644 --- a/docs/iris/src/whatsnew/1.1.rst +++ b/docs/iris/src/whatsnew/1.1.rst @@ -1,71 +1,64 @@ -What's new in Iris 1.1 -********************** +v1.1 (03 Jan 2013) +****************** -:Release: 1.1.0 -:Date: 7 Dec, 2012 - -This document explains the new/changed features of Iris in version 1.1. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -With the release of Iris 1.1, we are introducing support for Mac OS X. -Version 1.1 also sees the first batch of performance enhancements, with -some notable improvements to netCDF/PP import. + +.. contents:: Skip to section: + :local: + :depth: 3 -Iris 1.1 features -================= +Features +======== -A summary of the main features added with version 1.1: +With the release of Iris 1.1, we are introducing support for Mac OS X. +Version 1.1 also sees the first batch of performance enhancements, with +some notable improvements to netCDF/PP import. * Support for Mac OS X. + * GRIB1 import now supports time units of "3 hours". + * Fieldsfile import now supports unpacked and "CRAY" 32-bit packed data in 64-bit Fieldsfiles. + * PP file import now supports "CRAY" 32-bit packed data. + * Various performance improvements, particularly for netCDF import, PP import, and constraints. + * GRIB2 export now supports level types of altitude and height (codes 102 and 103). + * iris.analysis.cartography.area_weights now supports non-standard dimension orders. + * PP file import now adds the "forecast_reference_time" for fields where LBTIM is 11, 12, 13, 31, or 32. + * PP file import now supports LBTIM values of 1, 2, and 3. + * Fieldsfile import now has some support for ancillary files. + * Coordinate categorisation functions added for day-of-year and user-defined seasons. + * GRIB2 import now has partial support for probability data defined with product template 4.9. -Bugs fixed ----------- -* PP export no longer attempts to set/overwrite the STASH code based on - the standard_name. 
-* Cell comparisons now work consistently, which fixes a bug where - bounded_cell > point_cell compares the point to the bounds but, - point_cell < bounded_cell compares the points. -* Fieldsfile import now correctly recognises pre v3.1 and post v5.2 - versions, which fixes a bug where the two were interchanged. -* iris.analysis.trajectory.interpolate now handles hybrid-height. - -Incompatible changes --------------------- -* N/A - -Deprecations ------------- -* N/A - Coordinate categorisation -========================= +------------------------- An :func:`~iris.coord_categorisation.add_day_of_year` categorisation function has been added to the existing suite in :mod:`iris.coord_categorisation`. + Custom seasons --------------- +~~~~~~~~~~~~~~ The conventional seasonal categorisation functions have been complemented by two groups of functions which handle user-defined, @@ -97,3 +90,19 @@ The other custom season function is: This function adds a coordinate containing True/False values determined by membership of a single custom season. + + +Bugs fixed +========== + +* PP export no longer attempts to set/overwrite the STASH code based on + the standard_name. + +* Cell comparisons now work consistently, which fixes a bug where + bounded_cell > point_cell compares the point to the bounds, but + point_cell < bounded_cell compares the points. + +* Fieldsfile import now correctly recognises pre v3.1 and post v5.2 + versions, which fixes a bug where the two were interchanged. + +* iris.analysis.trajectory.interpolate now handles hybrid-height. diff --git a/docs/iris/src/whatsnew/1.10.rst b/docs/iris/src/whatsnew/1.10.rst index 26f21c0252..b5dfc1974b 100644 --- a/docs/iris/src/whatsnew/1.10.rst +++ b/docs/iris/src/whatsnew/1.10.rst @@ -1,14 +1,18 @@ -What's New in Iris 1.10 -*********************** +v1.10 (05 Sep 2016) +********************* -:Release: 1.10 -:Date: 5th September 2016 - -This document explains the new/changed features of Iris in version 1.10 +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.10 Features -================== + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== + .. _iris_grib_added: * Support has now been added for the @@ -19,11 +23,11 @@ Iris 1.10 Features iris module :mod:`iris.fileformats.grib`. * The capabilities of ``iris_grib`` are essentially the same as the existing - :mod:`iris.fileformats.grib` when used with ``iris.FUTURE.strict_grib_load=True``, - with only small detail differences. + :mod:`iris.fileformats.grib` when used with + ``iris.FUTURE.strict_grib_load=True``, with only small detail differences. - * The old :mod:`iris.fileformats.grib` module is now deprecated and may shortly be - removed. + * The old :mod:`iris.fileformats.grib` module is now deprecated and may + shortly be removed. * If you are already using the recommended :data:`iris.FUTURE` setting ``iris.FUTURE.strict_grib_load=True`` this should not cause problems, as @@ -44,79 +48,204 @@ Iris 1.10 Features any problems you uncover, such as files that will no longer load with the new implementation. -* :meth:`iris.experimental.regrid.PointInCell.regridder` now works across coordinate systems, including non latlon systems. Additionally, the requirement that the source data X and Y coordinates be 2D has been removed. NB: some aspects of this change are backwards incompatible. -* Plotting non-Gregorian calendars is now supported. This adds `nc_time_axis `_ as a dependency.
-* Promoting a scalar coordinate to a dimension coordinate with :func:`iris.util.new_axis` no longer loads deferred data. -* The parsing functionality for Cell Methods from netCDF files is available as part of the :mod:`iris.fileformats.netcdf` module as :func:`iris.fileformats.netcdf.parse_cell_methods`. -* Support for the NameIII Version 2 file format has been added. -* Loading netcdf data in Mercator and Stereographic projections now accepts optional extra projection parameter attributes (``false_easting``, ``false_northing`` and ``scale_factor_at_projection_origin``), if they match the default values. +* :meth:`iris.experimental.regrid.PointInCell.regridder` now works across + coordinate systems, including non latlon systems. Additionally, the + requirement that the source data X and Y coordinates be 2D has been removed. + NB: some aspects of this change are backwards incompatible. - * NetCDF files which define a Mercator projection where the ``false_easting``, ``false_northing`` and ``scale_factor_at_projection_origin`` match the defaults will have the projection loaded correctly. Otherwise, a warning will be issued for each parameter that does not match the default and the projection will not be loaded. - * NetCDF files which define a Steroegraphic projection where the ``scale_factor_at_projection_origin`` is equal to 1.0 will have the projection loaded correctly. Otherwise, a warning will be issued and the projection will not be loaded. +* Plotting non-Gregorian calendars is now supported. This adds + `nc_time_axis `_ as a dependency. -* The :mod:`iris.plot` routines :func:`~iris.plot.contour`, :func:`~iris.plot.contourf`, :func:`~iris.plot.outline`, :func:`~iris.plot.pcolor`, :func:`~iris.plot.pcolormesh` and :func:`~iris.plot.points` now support plotting cubes with anonymous dimensions by specifying the *numeric index* of the anonymous dimension within the ``coords`` keyword argument. +* Promoting a scalar coordinate to a dimension coordinate with + :func:`iris.util.new_axis` no longer loads deferred data. + +* The parsing functionality for Cell Methods from netCDF files is available + as part of the :mod:`iris.fileformats.netcdf` module as + :func:`iris.fileformats.netcdf.parse_cell_methods`. + +* Support for the NameIII Version 2 file format has been added. + +* Loading netcdf data in Mercator and Stereographic projections now accepts + optional extra projection parameter attributes (``false_easting``, + ``false_northing`` and ``scale_factor_at_projection_origin``), if they match + the default values. + + * NetCDF files which define a Mercator projection where the + ``false_easting``, ``false_northing`` and + ``scale_factor_at_projection_origin`` match the defaults will have the + projection loaded correctly. Otherwise, a warning will be issued for each + parameter that does not match the default and the projection will not be + loaded. + + * NetCDF files which define a Stereographic projection where the + ``scale_factor_at_projection_origin`` is equal to 1.0 will have the + projection loaded correctly. Otherwise, a warning will be issued and the + projection will not be loaded. + +* The :mod:`iris.plot` routines :func:`~iris.plot.contour`, + :func:`~iris.plot.contourf`, :func:`~iris.plot.outline`, + :func:`~iris.plot.pcolor`, :func:`~iris.plot.pcolormesh` and + :func:`~iris.plot.points` now support plotting cubes with anonymous + dimensions by specifying the *numeric index* of the anonymous dimension + within the ``coords`` keyword argument.
Note that the axis of the anonymous dimension will be plotted in index space. -* NetCDF loading and saving now supports Cubes that use the LambertConformal coordinate system. -* The experimental structured Fieldsfile loader :func:`~iris.experimental.fieldsfile.load` has been extended to also load structured PP files. +* NetCDF loading and saving now supports Cubes that use the LambertConformal + coordinate system. - Structured loading is a streamlined operation, offering the benefit of a significantly faster loading alternative to the more generic :func:`iris.load` mechanism. +* The experimental structured Fieldsfile loader + :func:`~iris.experimental.fieldsfile.load` has been extended to also load + structured PP files. - Note that structured loading is not an optimised wholesale replacement of :func:`iris.load`. Structured loading is restricted to input containing contiguously ordered fields for each phenomenon that repeat regularly over the same vertical levels and times. For further details, see :func:`~iris.experimental.fieldsfile.load` + Structured loading is a streamlined operation, offering the benefit of a + significantly faster loading alternative to the more generic + :func:`iris.load` mechanism. + + Note that structured loading is not an optimised wholesale replacement of + :func:`iris.load`. Structured loading is restricted to input containing + contiguously ordered fields for each phenomenon that repeat regularly over + the same vertical levels and times. For further details, see + :func:`~iris.experimental.fieldsfile.load` * :mod:`iris.experimental.regrid_conservative` is now compatible with ESMPy v7. -* Saving zonal (i.e. longitudinal) means to PP files now sets the '64s' bit in LBPROC. + +* Saving zonal (i.e. longitudinal) means to PP files now sets the '64s' bit in + LBPROC. + * Loading of 'little-endian' PP files is now supported. -* All appropriate :mod:`iris.plot` functions now handle an ``axes`` keyword, allowing use of the object oriented matplotlib interface rather than pyplot. -* The ability to pass file format object lists into the rules based load pipeline, as used for GRIB, Fields Files and PP has been added. The :func:`iris.fileformats.pp.load_pairs_from_fields` and :func:`iris.fileformats.grib.load_pairs_from_fields` are provided to produce cubes from such lists. These lists may have been filtered or altered using the appropriate :mod:`iris.fileformats` modules. -* Cubes can now have an 'hour' coordinate added with :meth:`iris.coord_categorisation.add_hour`. -* Time coordinates from PP fields with an lbcode of the form 3xx23 are now correctly encoded with a 360-day calendar. -* The loading from and saving to netCDF of CF cell_measure variables is supported, along with their representation within a Cube as :attr:`~iris.cube.Cube.cell_measures`. -* Cubes with anonymous dimensions can now be concatenated. This can only occur along a dimension that is not anonymous. -* NetCDF saving of ``valid_range``, ``valid_min`` and ``valid_max`` cube attributes is now allowed. - -Bugs Fixed + +* All appropriate :mod:`iris.plot` functions now handle an ``axes`` keyword, + allowing use of the object oriented matplotlib interface rather than pyplot. + +* The ability to pass file format object lists into the rules based load + pipeline, as used for GRIB, Fields Files and PP has been added. The + :func:`iris.fileformats.pp.load_pairs_from_fields` and + :func:`iris.fileformats.grib.load_pairs_from_fields` are provided to produce + cubes from such lists. 
These lists may have been filtered or altered using + the appropriate :mod:`iris.fileformats` modules. + +* Cubes can now have an 'hour' coordinate added with + :meth:`iris.coord_categorisation.add_hour`. + +* Time coordinates from PP fields with an lbcode of the form 3xx23 are now + correctly encoded with a 360-day calendar. + +* The loading from and saving to netCDF of CF cell_measure variables is + supported, along with their representation within a Cube as + :attr:`~iris.cube.Cube.cell_measures`. + +* Cubes with anonymous dimensions can now be concatenated. This can only occur + along a dimension that is not anonymous. + +* NetCDF saving of ``valid_range``, ``valid_min`` and ``valid_max`` cube + attributes is now allowed. + + +Bugs fixed ========== -* Altered Cell Methods to display coordinate's standard_name rather than var_name where appropriate to avoid human confusion. -* Saving multiple cubes with netCDF4 protected attributes should now work as expected. -* Concatenating cubes with singleton dimensions (dimensions of size one) now works properly. -* Fixed the ``grid_mapping_name`` and ``secant_latitudes`` handling for the LambertConformal coordinate system. -* Fixed bug in :func:`iris.analysis.cartography.project` where the output projection coordinates didn't have units. -* Attempting to use :meth:`iris.sample_data_path` to access a file that isn't actually Iris sample data now raises a more descriptive error. A note about the appropriate use of `sample_data_path` has also been added to the documentation. -* Fixed a bug where regridding or interpolation with the :class:`~iris.analysis.Nearest` scheme returned floating-point results even when the source data was integer typed. It now always returns the same type as the source data. -* Fixed a bug where regridding circular data would ignore any source masking. This affected any regridding using the :class:`~iris.analysis.Linear` and :class:`~iris.analysis.Nearest` schemes, and also :func:`iris.analysis.interpolate.linear`. -* The ``coord_name`` parameter to :func:`~iris.fileformats.rules.scalar_cell_method` is now checked correctly. -* LBPROC is set correctly when a cube containing the minimum of a variable is saved to a PP file. The IA component of LBTIM is set correctly when saving maximum or minimum values. -* The performance of :meth:`iris.cube.Cube.extract` when a list of values is given to an instance of :class:`iris.Constraint` has been improved considerably. -* Fixed a bug with :meth:`iris.cube.Cube.data` where an :class:`numpy.ndarray` was not being returned for scalar cubes with lazy data. -* When saving in netcdf format, the units of 'latitude' and 'longitude' coordinates specified in 'degrees' are saved as 'degrees_north' and 'degrees_east' respectively, as defined in the CF conventions for netCDF files: sections 4.1 and 4.2. -* Fixed a bug with a class of pp files with lbyr == 0, where the date would cause errors when converting to a datetime object (e.g. when printing a cube). - - When processing a pp field with lbtim = 2x, lbyr == lbyrd == 0 and lbmon == lbmond, 'month' and 'month_number' coordinates are created instead of 'time'. - -* Fixed a bug in :meth:`~iris.analysis.calculus.curl` where the sign of the r-component for spherical coordinates was opposite to what was expected. + +* Altered Cell Methods to display coordinate's standard_name rather than + var_name where appropriate to avoid human confusion. + +* Saving multiple cubes with netCDF4 protected attributes should now work as + expected. 
+ +* Concatenating cubes with singleton dimensions (dimensions of size one) now + works properly. + +* Fixed the ``grid_mapping_name`` and ``secant_latitudes`` handling for the + LambertConformal coordinate system. + +* Fixed bug in :func:`iris.analysis.cartography.project` where the output + projection coordinates didn't have units. + +* Attempting to use :meth:`iris.sample_data_path` to access a file that isn't + actually Iris sample data now raises a more descriptive error. A note about + the appropriate use of `sample_data_path` has also been added to the + documentation. + +* Fixed a bug where regridding or interpolation with the + :class:`~iris.analysis.Nearest` scheme returned floating-point results even + when the source data was integer typed. It now always returns the same type + as the source data. + +* Fixed a bug where regridding circular data would ignore any source masking. + This affected any regridding using the :class:`~iris.analysis.Linear` and + :class:`~iris.analysis.Nearest` schemes, and also + :func:`iris.analysis.interpolate.linear`. + +* The ``coord_name`` parameter to + :func:`~iris.fileformats.rules.scalar_cell_method` is now checked correctly. + +* LBPROC is set correctly when a cube containing the minimum of a variable is + saved to a PP file. The IA component of LBTIM is set correctly when saving + maximum or minimum values. + +* The performance of :meth:`iris.cube.Cube.extract` when a list of values is + given to an instance of :class:`iris.Constraint` has been improved + considerably. + +* Fixed a bug with :meth:`iris.cube.Cube.data` where an :class:`numpy.ndarray` + was not being returned for scalar cubes with lazy data. + +* When saving in netcdf format, the units of 'latitude' and 'longitude' + coordinates specified in 'degrees' are saved as 'degrees_north' and + 'degrees_east' respectively, as defined in the CF conventions for netCDF + files: sections 4.1 and 4.2. + +* Fixed a bug with a class of pp files with lbyr == 0, where the date would + cause errors when converting to a datetime object (e.g. when printing a cube). + + When processing a pp field with lbtim = 2x, lbyr == lbyrd == 0 and + lbmon == lbmond, 'month' and 'month_number' coordinates are created instead + of 'time'. + +* Fixed a bug in :meth:`~iris.analysis.calculus.curl` where the sign of the + r-component for spherical coordinates was opposite to what was expected. + * A bug that prevented cube printing in some cases has been fixed. -* Fixed a bug where a deepcopy of a :class:`~iris.coords.DimCoord` would have writable ``points`` and ``bounds`` arrays. These arrays can now no longer be modified in-place. -* Concatenation no longer occurs when the auxiliary coordinates of the cubes do not match. This check is not applied to AuxCoords that span the dimension the concatenation is occuring along. This behaviour can be switched off by setting the ``check_aux_coords`` kwarg in :meth:`iris.cube.CubeList.concatenate` to False. -* Fixed a bug in :meth:`iris.cube.Cube.subset` where an exception would be thrown while trying to subset over a non-dimensional scalar coordinate. -Incompatible Changes +* Fixed a bug where a deepcopy of a :class:`~iris.coords.DimCoord` would have + writeable ``points`` and ``bounds`` arrays. These arrays can now no longer be + modified in-place. + +* Concatenation no longer occurs when the auxiliary coordinates of the cubes do + not match. This check is not applied to AuxCoords that span the dimension the + concatenation is occurring along. 
This behaviour can be switched off by + setting the ``check_aux_coords`` kwarg in + :meth:`iris.cube.CubeList.concatenate` to False. + +* Fixed a bug in :meth:`iris.cube.Cube.subset` where an exception would be + thrown while trying to subset over a non-dimensional scalar coordinate. + + +Incompatible changes ==================== -* The source and target for :meth:`iris.experimental.regrid.PointInCell.regridder` must now have defined coordinate systems (i.e. not ``None``). Additionally, the source data X and Y coordinates must have the same cube dimensions. + +* The source and target for + :meth:`iris.experimental.regrid.PointInCell.regridder` must now have defined + coordinate systems (i.e. not ``None``). Additionally, the source data X and Y + coordinates must have the same cube dimensions. + Deprecations ============ + * Deprecated the :class:`iris.Future` option ``iris.FUTURE.strict_grib_load``. This only affected the module :mod:`iris.fileformats.grib`, which is itself now deprecated. Please see :ref:`iris_grib package `, above. + * Deprecated the module :mod:`iris.fileformats.grib`. The new package `iris_grib `_ replaces this - fuctionality, which will shortly be removed. + functionality, which will shortly be removed. Please see :ref:`iris_grib package `, above. -* The use of :data:`iris.config.SAMPLE_DATA_DIR` has been deprecated and replaced by the now importable `iris_sample_data `_ package. + +* The use of :data:`iris.config.SAMPLE_DATA_DIR` has been deprecated and + replaced by the now importable + `iris_sample_data `_ package. * Deprecated the module :mod:`iris.analysis.interpolate`. This contains the following public items, all of which are now deprecated and @@ -132,21 +261,38 @@ Deprecations Please use the replacement facilities individually noted in the module documentation for :mod:`iris.analysis.interpolate` + * The method :meth:`iris.cube.Cube.regridded` has been deprecated. Please use :meth:`iris.cube.Cube.regrid` instead (see :meth:`~iris.cube.Cube.regridded` for details). -* Deprecated :data:`iris.fileformats.grib.hindcast_workaround` and :class:`iris.fileformats.grib.GribWrapper`. The class :class:`iris.fileformats.grib.message.GribMessage` provides alternative means of working with GRIB message instances. + +* Deprecated :data:`iris.fileformats.grib.hindcast_workaround` and + :class:`iris.fileformats.grib.GribWrapper`. The class + :class:`iris.fileformats.grib.message.GribMessage` provides alternative means + of working with GRIB message instances. + * Deprecated the module :mod:`iris.fileformats.ff`. Please use the replacement facilities in module :mod:`iris.fileformats.um` : - * :func:`iris.fileformats.um.um_to_pp` replaces :class:`iris.fileformats.ff.FF2PP`. - * :func:`iris.fileformats.um.load_cubes` replaces :func:`iris.fileformats.ff.load_cubes`. - * :func:`iris.fileformats.um.load_cubes_32bit_ieee` replaces :func:`iris.fileformats.ff.load_cubes_32bit_ieee`. + * :func:`iris.fileformats.um.um_to_pp` replaces + :class:`iris.fileformats.ff.FF2PP`. + * :func:`iris.fileformats.um.load_cubes` replaces + :func:`iris.fileformats.ff.load_cubes`. + * :func:`iris.fileformats.um.load_cubes_32bit_ieee` replaces + :func:`iris.fileformats.ff.load_cubes_32bit_ieee`. + + All other public components are generally deprecated and will be removed in a + future release. + +* The :func:`iris.fileformats.pp.as_pairs` and + :func:`iris.fileformats.grib.as_pairs` are deprecated. 
These are replaced + with :func:`iris.fileformats.pp.save_pairs_from_cube` and + :func:`iris.fileformats.grib.save_pairs_from_cube`. - All other public components are generally deprecated and will be removed in a future release. +* ``iris.fileformats.pp_packing`` has been deprecated. Please install the + separate `mo_pack `_ package instead. + This provides the same functionality. -* The :func:`iris.fileformats.pp.as_pairs` and :func:`iris.fileformats.grib.as_pairs` are deprecated. These are replaced with :func:`iris.fileformats.pp.save_pairs_from_cube` and :func:`iris.fileformats.grib.save_pairs_from_cube`. -* ``iris.fileformats.pp_packing`` has been deprecated. Please install the separate `mo_pack `_ package instead. This provides the same functionality. * Deprecated logging functions (currently used only for rules logging): :data:`iris.config.iris.config.RULE_LOG_DIR`, :data:`iris.config.iris.config.RULE_LOG_IGNORE` and @@ -163,14 +309,37 @@ Deprecations :class:`iris.fileformats.rules.RulesContainer` and :func:`iris.fileformats.rules.calculate_forecast_period`. -* Deprecated the custom pp save rules mechanism implemented by the functions :func:`iris.fileformats.pp.add_save_rules` and :func:`iris.fileformats.pp.reset_save_rules`. The functions :func:`iris.fileformats.pp.as_fields`, :func:`iris.fileformats.pp.as_pairs` and :func:`iris.fileformats.pp.save_fields` provide alternative means of achieving the same ends. +* Deprecated the custom pp save rules mechanism implemented by the functions + :func:`iris.fileformats.pp.add_save_rules` and + :func:`iris.fileformats.pp.reset_save_rules`. The functions + :func:`iris.fileformats.pp.as_fields`, :func:`iris.fileformats.pp.as_pairs` + and :func:`iris.fileformats.pp.save_fields` provide alternative means of + achieving the same ends. + + +Documentation +============= + +* It is now clear that repeated values will form a group under + :meth:`iris.cube.Cube.aggregated_by` even if they aren't consecutive. Hence, + the documentation for :mod:`iris.cube` has been changed to reflect this. + +* The documentation for :meth:`iris.analysis.calculus.curl` has been updated + for clarity. + +* False claims about :meth:`iris.fileformats.pp.save`, + :meth:`iris.fileformats.pp.as_pairs`, and + :meth:`iris.fileformats.pp.as_fields` being able to take instances of + :class:`iris.cube.CubeList` as inputs have been removed. + +* A new code example + :ref:`sphx_glr_generated_gallery_meteorology_plot_wind_speed.py`, + demonstrating the use of a quiver plot to display wind speeds over Lake + Victoria, has been added. + +* The docstring for :data:`iris.analysis.SUM` has been updated to explicitly + state that weights passed to it aren't normalised internally. -Documentation Changes -===================== -* It is now clear that repeated values will form a group under :meth:`iris.cube.Cube.aggregated_by` even if they aren't consecutive. Hence, the documentation for :mod:`iris.cube` has been changed to reflect this. -* The documentation for :meth:`iris.analysis.calculus.curl` has been updated for clarity. -* False claims about :meth:`iris.fileformats.pp.save`, :meth:`iris.fileformats.pp.as_pairs`, and :meth:`iris.fileformats.pp.as_fields` being able to take instances of :class:`iris.cube.CubeList` as inputs have been removed. -* A :doc:`new code example <../examples/Meteorology/wind_speed>`, demonstrating the use of a quiver plot to display wind speeds over Lake Victoria, has been added. 
-* The docstring for :data:`iris.analysis.SUM` has been updated to explicitly state that weights passed to it aren't normalised internally. -* A note regarding the impossibility of partially collapsing multi-dimensional coordinates has been added to the user guide. +* A note regarding the impossibility of partially collapsing multi-dimensional + coordinates has been added to the user guide. diff --git a/docs/iris/src/whatsnew/1.11.rst b/docs/iris/src/whatsnew/1.11.rst index eb93ec2f8c..d04355b800 100644 --- a/docs/iris/src/whatsnew/1.11.rst +++ b/docs/iris/src/whatsnew/1.11.rst @@ -1,31 +1,45 @@ -What's New in Iris 1.11 -*********************** +v1.11 (29 Oct 2016) +********************* -:Release: 1.11 -:Date: 2016-11-28 - -This document explains the new/changed features of Iris in version 1.11 +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.11 Features -================== -* If available, display the ``STASH`` code instead of ``unknown / (unknown)`` when printing cubes - with no ``standard_name`` and no ``units``. + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== + +* If available, display the ``STASH`` code instead of ``unknown / (unknown)`` + when printing cubes with no ``standard_name`` and no ``units``. + * Support for saving to netCDF with data packing has been added. -* The coordinate system :class:`iris.coord_systems.LambertAzimuthalEqualArea` has been added with NetCDF saving support. -Bugs Fixed +* The coordinate system :class:`iris.coord_systems.LambertAzimuthalEqualArea` + has been added with NetCDF saving support. + +Bugs fixed ========== -* Fixed a floating point tolerance bug in :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid` + +* Fixed a floating point tolerance bug in + :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid` for wrapped longitudes. -* Allow :func:`iris.util.new_axis` to promote the nominated scalar coordinate of a cube - with a scalar masked constant data payload. -* Fixed a bug where :func:`iris.util._is_circular` would erroneously return false - when coordinate values are decreasing. -* When saving to NetCDF, the existing behaviour of writing string attributes as ASCII has been - maintained across known versions of netCDF4-python. - -Documentation Changes -===================== + +* Allow :func:`iris.util.new_axis` to promote the nominated scalar coordinate + of a cube with a scalar masked constant data payload. + +* Fixed a bug where :func:`iris.util._is_circular` would erroneously return + false when coordinate values are decreasing. + +* When saving to NetCDF, the existing behaviour of writing string attributes + as ASCII has been maintained across known versions of netCDF4-python. + + +Documentation +============= + * Fuller doc-string detail added to :func:`iris.analysis.cartography.unrotate_pole` and :func:`iris.analysis.cartography.rotate_pole`. diff --git a/docs/iris/src/whatsnew/1.12.rst b/docs/iris/src/whatsnew/1.12.rst index 59ea47d876..1d7fc8f978 100644 --- a/docs/iris/src/whatsnew/1.12.rst +++ b/docs/iris/src/whatsnew/1.12.rst @@ -1,14 +1,18 @@ -What's New in Iris 1.12 -*********************** +v1.12 (31 Jan 2017) +********************* -:Release: 1.12 -:Date: 2017-01-30 - -This document explains the new/changed features of Iris in version 1.12 +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.12 Features -================== + +.. 
contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== + .. _showcase: .. admonition:: Showcase Feature: New regridding schemes @@ -121,11 +125,13 @@ Iris 1.12 Features Deprecations ============ + * The module :mod:`iris.experimental.fieldsfile` has been deprecated, in favour of the new fast-loading mechanism provided by :meth:`iris.fileformats.um.structured_um_loading`. -Documentation Changes -===================== +Documentation +============= + * Corrected documentation of :class:`iris.analysis.AreaWeighted` scheme to make the usage scope clearer. diff --git a/docs/iris/src/whatsnew/1.13.rst b/docs/iris/src/whatsnew/1.13.rst index 532c160f13..30b3731d96 100644 --- a/docs/iris/src/whatsnew/1.13.rst +++ b/docs/iris/src/whatsnew/1.13.rst @@ -1,37 +1,78 @@ -What's New in Iris 1.13 -*********************** +v1.13 (17 May 2017) +************************* -:Release: 1.13 -:Date: 2017-05-17 +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) -This document explains the new/changed features of Iris in version 1.13 -(:doc:`View all changes `.) +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== -Iris 1.13 Features -================== +* Allow the reading of NAME trajectories stored by time instead of by particle + number. -* Allow the reading of NAME trajectories stored by time instead of by particle number. * An experimental link to python-stratify via :mod:`iris.experimental.stratify`. -* Data arrays may be shared between cubes, and subsets of cubes, by using the :meth:`iris.cube.share_data` flag. +* Data arrays may be shared between cubes, and subsets of cubes, by using the + :meth:`iris.cube.share_data` flag. -Bug Fixes + +Bug fixes ========= -* The bounds are now set correctly on the longitude coordinate if a zonal mean diagnostic has been loaded from a PP file as per the CF Standard. -* NetCDF loading will now determine whether there is a string-valued scalar label, i.e. a character variable that only has one dimension (the length of the string), and interpret this correctly. -* A line plot of geographic coordinates (e.g. drawing a trajectory) wraps around the edge of the map cleanly, rather than plotting a segment straight across the map. -* When saving to PP, lazy data is preserved when generating PP fields from cubes so that a list of cubes can be saved to PP without excessive memory requirements. -* An error is now correctly raised if a user tries to perform an arithmetic operation on two cubes with mismatching coordinates. Previously these cases were caught by the add and subtract operators, and now it is also caught by the multiply and divide operators. -* Limited area Rotated Pole datasets where the data range is ``0 <= lambda < 360``, for example as produced in New Zealand, are plotted over a sensible map extent by default. -* Removed the potential for a RuntimeWarning: overflow encountered in ``int_scalars`` which was missed during collapsed calculations. This could trip up unwary users of limited data types, such as int32 for very large numbers (e.g. seconds since 1970). -* The CF conventions state that certain ``formula_terms`` terms may be omitted and assumed to be zero (http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#dimensionless-v-coord) so Iris now allows factories to be constructed with missing terms. 
-* In the User Guide's contour plot example, clabel inline is set to be False so that it renders correctly, avoiding spurious horizontal lines across plots, although this does make labels a little harder to see. -* The computation of area weights has been changed to a more numerically stable form. The previous form converted latitude to colatitude and used difference of cosines in the cell area computation. This formulation uses latitude and difference of sines. The conversion from latitude to colatitude at lower precision causes errors when computing the cell areas. +* The bounds are now set correctly on the longitude coordinate if a zonal mean + diagnostic has been loaded from a PP file as per the CF Standard. + +* NetCDF loading will now determine whether there is a string-valued scalar + label, i.e. a character variable that only has one dimension (the length of + the string), and interpret this correctly. + +* A line plot of geographic coordinates (e.g. drawing a trajectory) wraps + around the edge of the map cleanly, rather than plotting a segment straight + across the map. + +* When saving to PP, lazy data is preserved when generating PP fields from + cubes so that a list of cubes can be saved to PP without excessive memory + requirements. + +* An error is now correctly raised if a user tries to perform an arithmetic + operation on two cubes with mismatching coordinates. Previously these cases + were caught by the add and subtract operators, and now it is also caught by + the multiply and divide operators. + +* Limited area Rotated Pole datasets where the data range is + ``0 <= lambda < 360``, for example as produced in New Zealand, are plotted + over a sensible map extent by default. + +* Removed the potential for a RuntimeWarning: overflow encountered in + ``int_scalars`` which was missed during collapsed calculations. This could + trip up unwary users of limited data types, such as int32 for very large + numbers (e.g. seconds since 1970). + +* The CF conventions state that certain ``formula_terms`` terms may be omitted + and assumed to be zero + (http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#dimensionless-v-coord) + so Iris now allows factories to be constructed with missing terms. + +* In the User Guide's contour plot example, clabel inline is set to be False + so that it renders correctly, avoiding spurious horizontal lines across + plots, although this does make labels a little harder to see. + +* The computation of area weights has been changed to a more numerically + stable form. The previous form converted latitude to colatitude and used + difference of cosines in the cell area computation. This formulation uses + latitude and difference of sines. The conversion from latitude to colatitude + at lower precision causes errors when computing the cell areas. + Testing ======= -* Iris has adopted conda-forge to provide environments for continuous integration testing. +* Iris has adopted conda-forge to provide environments for continuous + integration testing. diff --git a/docs/iris/src/whatsnew/1.2.rst b/docs/iris/src/whatsnew/1.2.rst index 720ae73376..982a68add6 100644 --- a/docs/iris/src/whatsnew/1.2.rst +++ b/docs/iris/src/whatsnew/1.2.rst @@ -1,16 +1,17 @@ -What's new in Iris 1.2 -********************** +v1.2 (28 Feb 2013) +****************** -:Release: 1.2.0 -:Date: 7th March 2013 - -This document explains the new/changed features of Iris in version 1.2. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) 
-Iris 1.2 features -================= -A summary of the main features added with version 1.2: +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== * :meth:`iris.cube.Cube.convert_units()` and :meth:`iris.coords.Coord.convert_units()` have been added. This is @@ -18,6 +19,7 @@ A summary of the main features added with version 1.2: another. For example, to convert a cube in kelvin to celsius, one can now call cube.convert_units('celsius'). The operation is in-place and if the units are not convertible an exception will be raised. + * :attr:`iris.cube.Cube.var_name`, :attr:`iris.coords.Coord.var_name` and :attr:`iris.aux_factory.AuxCoordFactory.var_name` attributes have been added. This attribute represents the CF variable name of the object. It is populated @@ -25,42 +27,57 @@ A summary of the main features added with version 1.2: var_name keyword argument has also been added to the :meth:`iris.cube.Cube.coord()`, :meth:`iris.cube.Cube.coords()` and :meth:`iris.cube.Cube.aux_factory()` methods. + * :meth:`iris.coords.Coord.is_compatible()` has been added. This method is used to determine whether two coordinates are sufficiently alike to allow operations such as :meth:`iris.coords.Coord.intersect()` and :func:`iris.analysis.interpolate.regrid()` to take place. A corresponding method for cubes, :meth:`iris.cube.Cube.is_compatible()`, has also been added. + * Printing a :class:`~iris.cube.Cube` is now more user friendly with regards to dates and time. All *time* and *forecast_reference_time* scalar coordinates now display human readable date/time information. + * The units of a :class:`~iris.cube.Cube` are now shown when it is printed. + * The area weights calculated by :func:`iris.analysis.cartography.area_weights` may now be normalised relative to the total grid area. -* Weights may now be passed to :meth:`iris.cube.Cube.rolling_window` aggregations, - thus allowing arbitrary digital filters to be applied to a :class:`~iris.cube.Cube`. + +* Weights may now be passed to :meth:`iris.cube.Cube.rolling_window` + aggregations, thus allowing arbitrary digital filters to be applied to a + :class:`~iris.cube.Cube`. + Bugs fixed ----------- +========== + * The GRIB hindcast interpretation of negative forecast times can be enabled via the :data:`iris.fileformats.grib.hindcast_workaround` flag. + * The NIMROD file loader has been extended to cope with orography vertical coordinates. + Incompatible changes --------------------- +==================== + * The deprecated :attr:`iris.cube.Cube.unit` and :attr:`iris.coords.Coord.unit` attributes have been removed. + Deprecations ------------- +============ + * The :meth:`iris.coords.Coord.unit_converted()` method has been deprecated. Users should make a copy of the coordinate using :meth:`iris.coords.Coord.copy()` and then call the :meth:`iris.coords.Coord.convert_units()` method of the new coordinate. + * With the addition of the var_name attribute the signatures of DimCoord and AuxCoord have changed. This should have no impact if you are providing parameters as keyword arguments, but it may cause issues if you are relying on the position/order of the arguments. + * Iteration over a :class:`~iris.cube.Cube` has been deprecated. Instead, users should use :meth:`iris.cube.Cube.slices`. 
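As a quick illustration of the v1.2 unit conversion and weighted rolling-window features noted above, here is a minimal sketch; the sample file name is an assumption and requires the ``iris-sample-data`` package::

    import numpy as np

    import iris
    import iris.analysis

    cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc"))

    # In-place conversion: raises an error if the units are not convertible.
    cube.convert_units("celsius")

    # A weighted rolling window over time acts as a simple 1-2-1 smoothing
    # filter applied along the time dimension.
    smoothed = cube.rolling_window(
        "time", iris.analysis.MEAN, 3, weights=np.array([0.25, 0.5, 0.25])
    )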
diff --git a/docs/iris/src/whatsnew/1.3.rst b/docs/iris/src/whatsnew/1.3.rst index dbea08ad03..fd6f2cfef9 100644 --- a/docs/iris/src/whatsnew/1.3.rst +++ b/docs/iris/src/whatsnew/1.3.rst @@ -1,65 +1,42 @@ -What's new in Iris 1.3 -********************** +v1.3 (27 Mar 2013) +****************** -:Release: 1.3.0 -:Date: 27 March 2013 - -This document explains the new/changed features of Iris in version 1.3. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.3 features -================= -A summary of the main features added with version 1.3: +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== * Experimental support for :ref:`loading ABF/ABL files`. + * Support in :func:`iris.analysis.interpolate.linear` for longitude ranges other than [-180, 180]. + * Support for :ref:`customised CF profiles` on export to netCDF. + * The documentation now includes guidance on :ref:`how to cite Iris`. + * The ability to calculate the exponential of a Cube, via :func:`iris.analysis.maths.exp()`. + * Experimental support for :ref:`concatenating Cubes` along existing dimensions via :func:`iris.experimental.concatenate.concatenate()`. -Bugs fixed ----------- -* Printing a Cube now supports Unicode attribute values. -* PP export now sets LBMIN correctly. -* Converting between reference times now works correctly for - units with non-Gregorian calendars. -* Slicing a :class:`~iris.cube.CubeList` now returns a - :class:`~iris.cube.CubeList` instead of a normal list. - -Incompatible changes --------------------- -* N/A - -Deprecations ------------- -* The boolean methods/properties on the :class:`~iris.unit.Unit` class - have been updated to `is_...()` methods, in line with the project's - naming conventions. - - ====================================== =========================================== - Deprecated property/method New method - ====================================== =========================================== - :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()` - :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()` - :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()` - :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()` - :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()` - ====================================== =========================================== - .. _whats-new-abf: Loading ABF/ABL files -===================== +--------------------- Support for the ABF and ABL file formats (as `defined `_ by the @@ -80,7 +57,7 @@ For example:: .. _whats-new-cf-profile: Customised CF profiles -====================== +---------------------- Iris now provides hooks in the CF-netCDF export process to allow user-defined routines to check and/or modify the representation in the @@ -89,10 +66,13 @@ netCDF file. 
The following keys within the ``iris.site_configuration`` dictionary have been **reserved** as hooks to *external* user-defined CF profile functions: - * ``cf_profile`` injests a :class:`iris.cube.Cube` for analysis and returns a profile result - * ``cf_patch`` modifies the CF-netCDF file associated with export of the :class:`iris.cube.Cube` + * ``cf_profile`` ingests a :class:`iris.cube.Cube` for analysis and returns a + profile result + * ``cf_patch`` modifies the CF-netCDF file associated with export of the + :class:`iris.cube.Cube` -The ``iris.site_configuration`` dictionary should be configured via the ``iris/site_config.py`` file. +The ``iris.site_configuration`` dictionary should be configured via the +``iris/site_config.py`` file. For further implementation details see ``iris/fileformats/netcdf.py``. @@ -100,7 +80,7 @@ For further implementation details see ``iris/fileformats/netcdf.py``. .. _whats-new-concat: Cube concatenation -================== +------------------ Iris now provides initial support for concatenating Cubes along one or more existing dimensions. Currently this will force the data to be @@ -126,3 +106,33 @@ combine these into a single Cube as follows:: As this is an experimental feature, your feedback is especially welcome. +Bugs fixed +========== + +* Printing a Cube now supports Unicode attribute values. + +* PP export now sets LBMIN correctly. + +* Converting between reference times now works correctly for + units with non-Gregorian calendars. + +* Slicing a :class:`~iris.cube.CubeList` now returns a + :class:`~iris.cube.CubeList` instead of a normal list. + + +Deprecations +============ + +* The boolean methods/properties on the :class:`~iris.unit.Unit` class + have been updated to `is_...()` methods, in line with the project's + naming conventions. + + ====================================== =========================================== + Deprecated property/method New method + ====================================== =========================================== + :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()` + :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()` + :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()` + :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()` + :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()` + ====================================== =========================================== diff --git a/docs/iris/src/whatsnew/1.4.rst b/docs/iris/src/whatsnew/1.4.rst index 053a6e1096..7f96643f5f 100644 --- a/docs/iris/src/whatsnew/1.4.rst +++ b/docs/iris/src/whatsnew/1.4.rst @@ -1,96 +1,114 @@ -What's new in Iris 1.4 -********************** +v1.4 (14 Jun 2013) +****************** -:Release: 1.4.0 -:Date: 14 June 2013 - -This document explains the new/changed features of Iris in version 1.4. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.4 features -================= -A summary of the main features added with version 1.4: +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== * Multiple cubes can now be exported to a NetCDF file. + * Correct nearest-neighbour calculation with circular coords. + * :ref:`Experimental regridding enhancements`. + * :ref:`Iris-Pandas interoperability`. + * NIMROD level type 12 (levels below ground) can now be loaded. + * :ref:`Load cubes from the internet via OPeNDAP`. + * :ref:`GeoTiff export (experimental)`. 
+
 * :ref:`Cube merge update`.
+
 * :ref:`Unambiguous season year naming`.
+
 * NIMROD files with multiple fields and period of interest can now be loaded.
+
 * Missing values are now handled when loading GRIB messages.
+
 * PP export rule to calculate forecast period.
+
 * :func:`~iris.cube.Cube.aggregated_by` now maintains array masking.
+
 * IEEE 32bit fieldsfiles can now be loaded.
+
 * NetCDF transverse mercator and climatology data can now be loaded.
+
 * Polar stereographic GRIB data can now be loaded.
+
 * :ref:`Cubes with no vertical coord can now be exported to GRIB`.
+
 * :ref:`Simplified resource configuration`.
+
 * :ref:`Extended GRIB parameter translation`.
+
 * Added an optimisation for single-valued coordinate constraints.
+
 * :ref:`One dimensional linear interpolation fix`.
-* :ref:`Fix for iris.analysis.calculus.differentiate`.
-* Fixed pickling of cubes with 2D aux coords from NetCDF.
-* Fixed bug which ignored the "coords" keyword for certain plots.
-* Use the latest release of Cartopy, v0.8.0.
+* :ref:`Fix for iris.analysis.calculus.differentiate`.
-Incompatible changes
---------------------
-* As part of simplifying the mechanism for accessing test data,
-  :func:`iris.io.select_data_path`, :data:`iris.config.DATA_REPOSITORY`,
-  :data:`iris.config.MASTER_DATA_REPOSITORY` and
-  :data:`iris.config.RESOURCE_DIR` have been removed.
+* Fixed pickling of cubes with 2D aux coords from NetCDF.
-Deprecations
-------------
-* The *add_custom_season_** functions from :mod:`~iris.coord_categorisation` have been deprecated in favour of adding their functionality to the *add_season_** functions
+* Fixed bug which ignored the "coords" keyword for certain plots.
+* Use the latest release of Cartopy, v0.8.0.

 .. _OPeNDAP: http://www.opendap.org/about
-
-
 .. _exp-regrid:

 Experimental regridding enhancements
-====================================
+------------------------------------
+
+Bilinear, area-weighted and area-conservative regridding functions are now
+available in :mod:`iris.experimental`. These functions support masked data and
+handle derived coordinates such as hybrid height. The final API is still in
+development.
-Bilinear, area-weighted and area-conservative regridding functions are now available in
-:mod:`iris.experimental`. These functions support masked data and handle
-derived coordinates such as hybrid height. The final API is still in development. In the meantime:
+
 Bilinear rectilinear regridding
 -------------------------------
+
 :func:`~iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid`
-can be used to regrid a cube onto a horizontal grid defined in a different coordinate system.
-The data values are calculated using bilinear interpolation.
+can be used to regrid a cube onto a horizontal grid defined in a different
+coordinate system. The data values are calculated using bilinear interpolation.

 For example::

     from iris.experimental.regrid import regrid_bilinear_rectilinear_src_and_grid
     regridded_cube = regrid_bilinear_rectilinear_src_and_grid(source_cube, target_grid_cube)

+
 Area-weighted regridding
 ------------------------
-:func:`~iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid` can be used to regrid a cube
-such that the data values of the resulting cube are calculated using the
-area-weighted mean.
+
+:func:`~iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`
+can be used to regrid a cube such that the data values of the resulting cube
+are calculated using the area-weighted mean.
For example:: from iris.experimental.regrid import regrid_area_weighted_rectilinear_src_and_grid as regrid_area_weighted regridded_cube = regrid_area_weighted(source_cube, target_grid_cube) + Area-conservative regridding ---------------------------- + :func:`~iris.experimental.regrid_conservative.regrid_conservative_via_esmpy` -can be used for area-conservative regridding between geographical coordinate systems. -This uses the ESMF library functions, via the ESMPy interface. +can be used for area-conservative regridding between geographical coordinate +systems. This uses the ESMF library functions, via the ESMPy interface. For example:: @@ -100,19 +118,21 @@ For example:: .. _iris-pandas: -Iris-Pandas interoperablilty -============================ +Iris-Pandas interoperability +---------------------------- + Conversion to and from Pandas Series_ and DataFrames_ is now available. See :mod:`iris.pandas` for more details. -.. _Series: http://pandas.pydata.org/pandas-docs/stable/api.html#series -.. _DataFrames: http://pandas.pydata.org/pandas-docs/stable/api.html#dataframe +.. _Series: https://pandas.pydata.org/pandas-docs/stable/reference/series.html +.. _DataFrames: https://pandas.pydata.org/pandas-docs/stable/reference/frame.html .. _load-opendap: Load cubes from the internet via OPeNDAP -======================================== +---------------------------------------- + Cubes can now be loaded directly from the internet, via OPeNDAP_. For example:: @@ -123,8 +143,10 @@ For example:: .. _geotiff_export: GeoTiff export -============== -With this experimental feature, two dimensional cubes can now be exported to GeoTiff files. +-------------- + +With this experimental feature, two dimensional cubes can now be exported to +GeoTiff files. For example:: @@ -139,17 +161,20 @@ For example:: .. _cube-merge-update: Cube merge update -================= +----------------- + Cube merging now favours numerical coordinates over string coordinates to describe a dimension, and :class:`~iris.coords.DimCoord` over :class:`~iris.coords.AuxCoord`. These modifications prevent the error: -*"No functional relationship between separable and inseparable candidate dimensions"*. +*"No functional relationship between separable and inseparable candidate +dimensions"*. .. _season-year-name: Unambiguous season year naming -============================== +------------------------------ + The default names of categorisation coordinates are now less ambiguous. For example, :func:`~iris.coord_categorisation.add_month_number` and :func:`~iris.coord_categorisation.add_month_fullname` now create @@ -159,15 +184,18 @@ For example, :func:`~iris.coord_categorisation.add_month_number` and .. _grib-novert: Cubes with no vertical coord can now be exported to GRIB -======================================================== +-------------------------------------------------------- + Iris can now export cubes with no vertical coord to GRIB. -The solution is still under discussion: See https://github.com/SciTools/iris/issues/519. +The solution is still under discussion: See +https://github.com/SciTools/iris/issues/519. .. _simple_cfg: Simplified resource configuration -================================= +--------------------------------- + A new configuration variable called :data:`iris.config.TEST_DATA_DIR` has been added, replacing the previous combination of :data:`iris.config.MASTER_DATA_REPOSITORY` and @@ -180,7 +208,8 @@ be set by adding a ``test_data_dir`` entry to the ``Resources`` section of .. 
_grib_params: Extended GRIB parameter translation -=================================== +----------------------------------- + - More GRIB2 params are recognised on input. - Now translates some codes on GRIB2 output. - Some GRIB2 params may load with a different standard_name. @@ -190,16 +219,37 @@ Extended GRIB parameter translation .. _one-d-linear: One dimensional linear interpolation fix -======================================== -:func:`~iris.analysis.interpolate.linear` can now extrapolate from a single point -assuming a gradient of zero. This prevents an issue when loading cross sections -with a hybrid height coordinate, on a staggered grid and only a single orography field. +---------------------------------------- + +:func:`~iris.analysis.interpolate.linear` can now extrapolate from a single +point assuming a gradient of zero. This prevents an issue when loading cross +sections with a hybrid height coordinate, on a staggered grid and only a single +orography field. .. _calc-diff-fix: Fix for iris.analysis.calculus.differentiate -============================================= -A bug in :func:`~iris.analysis.calculus.differentiate` that had the potential to cause -the loss of coordinate metadata when calculating the curl or the derivative of a cube has been fixed. +-------------------------------------------- + +A bug in :func:`~iris.analysis.calculus.differentiate` that had the potential +to cause the loss of coordinate metadata when calculating the curl or the +derivative of a cube has been fixed. + + +Incompatible changes +==================== + +* As part of simplifying the mechanism for accessing test data, + :func:`iris.io.select_data_path`, :data:`iris.config.DATA_REPOSITORY`, + :data:`iris.config.MASTER_DATA_REPOSITORY` and + :data:`iris.config.RESOURCE_DIR` have been removed. + +Deprecations +============ + +* The *add_custom_season_** functions from :mod:`~iris.coord_categorisation` + have been deprecated in favour of adding their functionality to the + *add_season_** functions + diff --git a/docs/iris/src/whatsnew/1.5.rst b/docs/iris/src/whatsnew/1.5.rst index 7af1e40285..07f54e15cf 100644 --- a/docs/iris/src/whatsnew/1.5.rst +++ b/docs/iris/src/whatsnew/1.5.rst @@ -1,16 +1,21 @@ -What's new in Iris 1.5 -********************** +v1.5 (13 Sep 2013) +****************** -:Release: 1.5.0 -:Date: 12 September 2013 - -This document explains the new/changed features of Iris in version 1.5. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.5 features -================= + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== + * Scatter plots can now be produced using :func:`iris.plot.scatter` and :func:`iris.quickplot.scatter`. + * The functions :func:`iris.plot.plot` and :func:`iris.quickplot.plot` now take up to two arguments, which may be cubes or coordinates, allowing the user to have full control over what is plotted on each axis. The coords keyword @@ -25,7 +30,9 @@ Iris 1.5 features * :class:`iris.analysis.SUM` is now a weighted aggregator, allowing it to take a weights keyword argument. + * GRIB2 translations added for standard_name 'soil_temperature'. 
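+
+  For example, once translated, such GRIB2 fields can be loaded by name (a
+  sketch; ``filename`` here stands for a suitable GRIB2 file)::
+
+      import iris
+
+      soil_temperature = iris.load_cube(filename, 'soil_temperature')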
+ * :meth:`iris.cube.Cube.slices` can now handle passing dimension index as well as the currently supported types (string, coordinate), in order to slice in cases where there is no coordinate associated with a dimension (a mix of @@ -48,6 +55,7 @@ Iris 1.5 features plt.show() * Support for UM ancillary files truncated with the UM utility ieee + * Complete support for Transverse Mercator with saving to NetCDF also. .. code-block:: python @@ -70,18 +78,26 @@ Iris 1.5 features .. image:: images/transverse_merc.png * Support for loading NAME files (gridded and trajectory data). + * Multi-dimensional coordinate support added for :func:`iris.analysis.cartography.cosine_latitude_weights` + * Added limited packaged GRIB support (bulletin headers). + * In-place keyword added to :func:`iris.analysis.maths.divide` and :func:`iris.analysis.maths.multiply`. + * Performance gains for PP loading of the order of 40%. + * :mod:`iris.quickplot` now has a :func:`~iris.quickplot.show` function to provide convenient access to matplotlib.pyplot.show(). + * :meth:`iris.coords.DimCoord.from_regular` now implemented which creates a :class:`~iris.coords.DimCoord` with regularly spaced points, and optionally bounds. + * Iris can now cope with a missing bounds variable from NetCDF files. + * Added support for bool array indexing on a cube. .. code-block:: python @@ -95,73 +111,95 @@ Iris 1.5 features * Added support for loading fields defined on regular Gaussian grids from GRIB files. + * :func:`iris.analysis.interpolate.extract_nearest_neighbour` now works without needing to load the data (especially relevant to large datasets). + * When using plotting routines from :mod:`iris.plot` or :mod:`iris.quickplot`, the direction of vertical axes will be reversed if the corresponding coordinate has a "positive" attribute set to "down". - see: :ref:`Oceanography-atlantic_profiles` + see: :ref:`sphx_glr_generated_gallery_oceanography_plot_atlantic_profiles.py` * New PP stashcode translations added including 'dewpoint' and 'relative_humidity'. + * Added implied heights for several common PP STASH codes. + * GeoTIFF export capability enhanced for supporting various data types, coord systems and mapping 0 to 360 longitudes to the -180 to 180 range. Bugs fixed ----------- +========== + * NetCDF error handling on save has been extended to capture file path and permission errors. + * Shape of the Earth scale factors are now correctly interpreted by the GRIB loader. They were previously used as a multiplier for the given value but should have been used as a decimal shift. + * OSGB definition corrected. + * Transverse Mercator on load now accepts the following interchangeably due to inconsistencies in CF documentation: - * +scale_factor_at_central_meridian <-> scale_factor_at_projection_origin - * +longitude_of_central_meridian <-> longitude_of_projection_origin - (+recommended encoding) + + * +scale_factor_at_central_meridian <-> scale_factor_at_projection_origin + + * +longitude_of_central_meridian <-> longitude_of_projection_origin + (+recommended encoding) + * Ellipse description now maintained when converting GeogCS to cartopy. + * GeoTIFF export bug fixes. + * Polar axis now set to the North Pole, when a cube with no coordinate system is saved to the PP file-format. + * :meth:`iris.coords.DimCoord.from_coord` and :meth:`iris.coords.AuxCoord.from_coord` now correctly returns a copy of the source coordinate's coordinate system. 
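+
+  A minimal illustration (a sketch, assuming an existing ``cube`` whose
+  ``latitude`` coordinate defines a coordinate system)::
+
+      from iris.coords import AuxCoord
+
+      src = cube.coord('latitude')
+      aux = AuxCoord.from_coord(src)
+      # The coordinate system is copied, not shared with the source.
+      assert aux.coord_system == src.coord_system
+      assert aux.coord_system is not src.coord_system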
+ * Units part of the axis label is now omitted when the coordinate it represents is given as a time reference (:mod:`iris.quickplot`). + * CF dimension coordinate is now maintained in the resulting cube when a cube with CF dimension coordinate is being aggregated over. + * Units for Lambert conformal and polar stereographic coordinates now defined as meters. + * Various fieldsfile load bugs including failing to read the coordinates from the file have been fixed. + * Coding of maximum and minimum time-stats in GRIB2 saving has been fixed. -* Example code in section 4.1 of the userguide updated so it uses a sample + +* Example code in section 4.1 of the user guide updated so it uses a sample data file that exists. + * Zorder of contour lines drawn by :func:`~iris.plot.contourf` has been changed to address issue of objects appearing in-between line and filled contours. + * Coord comparisons now function correctly when comparing to numpy scalars. + * Cube loading constraints and :meth:`iris.cube.Cube.extract` correctly implement cell equality methods. -Incompatible changes --------------------- -* N/A - Deprecations ------------- +============ + * The coords keyword argument for :func:`iris.plot.plot` and :func:`iris.quickplot.plot` has been deprecated due to the new API which accepts multiple cubes or coordinates. + * :meth:`iris.fileformats.pp.PPField.regular_points` and :meth:`iris.fileformats.pp.PPField.regular_bounds` have now been deprecated in favour of a new factory method :meth:`iris.coords.DimCoord.from_regular()`. + * :func:`iris.fileformats.pp.add_load_rules` and :func:`iris.fileformats.grib.add_load_rules` are now deprecated. diff --git a/docs/iris/src/whatsnew/1.6.rst b/docs/iris/src/whatsnew/1.6.rst index 4b540c6cc9..068311db5f 100644 --- a/docs/iris/src/whatsnew/1.6.rst +++ b/docs/iris/src/whatsnew/1.6.rst @@ -1,14 +1,17 @@ -What's new in Iris 1.6 -********************** +v1.6 (26 Jan 2014) +****************** -:Release: 1.6.1 -:Date: 18th February 2014 - -This document explains the new/changed features of Iris in version 1.6. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.6 features -================= + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== .. _showcase: @@ -29,9 +32,9 @@ Iris 1.6 features >>> print([str(cell) for cell in coord.cells()]) ['1970-01-01 01:00:00', '1970-01-01 02:00:00', '1970-01-01 03:00:00'] - Note that, either a :class:`datetime.datetime` or :class:`netcdftime.datetime` - object instance will be returned, depending on the calendar of the time - reference coordinate. + Note that, either a :class:`datetime.datetime` or + :class:`netcdftime.datetime` object instance will be returned, depending on + the calendar of the time reference coordinate. This capability permits the ability to express time constraints more naturally when the cell represents a *datetime-like* object. @@ -41,8 +44,10 @@ Iris 1.6 features # Ignore the 1st of January. iris.Constraint(time=lambda cell: cell.point.month != 1 and cell.point.day != 1) - Note that, :class:`iris.Future` also supports a `context manager `_ - which allows multiple sections of code to execute with different run-time behaviour. + Note that, :class:`iris.Future` also supports a + `context manager `_ + which allows multiple sections of code to execute with different run-time + behaviour. .. code-block:: python @@ -63,12 +68,12 @@ Iris 1.6 features :class:`datetime.datetime` or :class:`netcdftime.datetime`. 
The *year, month, day, hour, minute, second* and *microsecond* attributes of - a :class:`iris.time.PartialDateTime` object may be fully or partially specified - for any given comparison. + a :class:`iris.time.PartialDateTime` object may be fully or partially + specified for any given comparison. This is particularly useful for time based constraints, whilst enabling the - :data:`iris.FUTURE.cell_datetime_objects`, see :ref:`here ` for further - details on this new release feature. + :data:`iris.FUTURE.cell_datetime_objects`, see :ref:`here ` for + further details on this new release feature. .. code-block:: python @@ -85,139 +90,64 @@ Iris 1.6 features * GRIB loading supports latitude/longitude or Gaussian reduced grids for version 1 and version 2. + * :ref:`A new utility function to assist with caching`. + * :ref:`The RMS aggregator supports weights`. + * :ref:`A new experimental function to equalise cube attributes`. + * :ref:`Collapsing a cube provides a tolerance level for missing-data`. + * NAME loading supports vertical coordinates. + * UM land/sea mask de-compression for Fieldsfiles and PP files. + * Lateral boundary condition Fieldsfile support. + * Staggered grid support for Fieldsfiles extended to type 6 (Arakawa C grid with v at poles). + * Extend support for Fieldsfiles with grid codes 11, 26, 27, 28 and 29. + * :ref:`Promoting a scalar coordinate to new leading cube dimension`. + * Interpreting cell methods from NAME. + * GRIB2 export without forecast_period, enabling NAME to GRIB2. + * Loading height levels from GRIB2. + * :func:`iris.coord_categorisation.add_categorised_coord` now supports multi-dimensional coordinate categorisation. -* Fieldsfiles and PP support for loading and saving of air potential temperature. + +* Fieldsfiles and PP support for loading and saving of air potential + temperature. + * :func:`iris.experimental.regrid.regrid_weighted_curvilinear_to_rectilinear` regrids curvilinear point data to a target rectilinear grid using associated area weights. -* Extended capability of the NetCDF saver :meth:`iris.fileformats.netcdf.Saver.write` - for fine-tune control of a :mod:`netCDF4.Variable`. Also allows multiple dimensions - to be nominated as *unlimited*. -* :ref:`A new PEAK aggregator providing spline interpolation`. -* A new utility function :func:`iris.util.broadcast_to_shape`. -* A new utility function :func:`iris.util.as_compatible_shape`. -* Iris tests can now be run on systems where directory write permissions - previously did not allow it. This is achieved by writing to the current working - directory in such cases. -* Support for 365 day calendar PP fields. -* Added phenomenon translation between cf and grib2 for wind (from) direction. -* PP files now retain lbfc value on save, derived from the stash attribute. -Bugs fixed -========== -* :meth:`iris.cube.Cube.rolling_window` has been extended to support masked arrays. -* :meth:`iris.cube.Cube.collapsed` now handles string coordinates. -* Default LBUSER(2) to -99 for Fieldsfile and PP saving. -* :func:`iris.util.monotonic` returns the correct direction. -* File loaders correctly parse filenames containing colons. -* ABF loader now correctly loads the ABF data payload once. -* Support for 1D array :data:`iris.cube.cube.attributes`. -* GRIB bounded level saving fix. -* :func:`iris.analysis.cartography.project` now associates a coordinate system - with the resulting target cube, where applicable. 
-* :func:`iris.util.array_equal` now correctly ignores any mask if present, - matching the behaviour of :func:`numpy.array_equal` except with string array - support. -* :func:`iris.analysis.interpolate.linear` now retains a mask in the resulting - cube. -* :meth:`iris.coords.DimCoord.from_regular` now correctly returns a coordinate - which will always be regular as indicated by :func:`~iris.util.is_regular`. -* :func:`iris.util.rolling_window` handling of masked arrays (degenerate - masks) fixed. -* Exception no longer raised for any ellipsoid definition in nimrod loading. +* Extended capability of the NetCDF saver + :meth:`iris.fileformats.netcdf.Saver.write` for fine-tune control of a + :mod:`netCDF4.Variable`. Also allows multiple dimensions to be nominated as + *unlimited*. -Incompatible changes -==================== -* The experimental 'concatenate' function is now a method of a - :class:`iris.cube.CubeList`, see :meth:`iris.cube.CubeList.concatenate`. The - functionality is unchanged. -* :meth:`iris.cube.Cube.extract_by_trajectory()` has been removed. - Instead, use :func:`iris.analysis.trajectory.interpolate()`. -* :func:`iris.load_strict()` has been removed. - Instead, use :func:`iris.load_cube()` and :func:`iris.load_cubes()`. -* :meth:`iris.coords.Coord.cos()` and :meth:`iris.coords.Coord.sin()` - have been removed. -* :meth:`iris.coords.Coord.unit_converted()` has been removed. - Instead, make a copy of the coordinate using - :meth:`iris.coords.Coord.copy()` and then call the - :meth:`iris.coords.Coord.convert_units()` method of the new - coordinate. -* Iteration over a :class:`~iris.cube.Cube` has been removed. Instead, - use :meth:`iris.cube.Cube.slices()`. -* The following :class:`~iris.unit.Unit` deprecated methods/properties have been removed. - - ====================================== =========================================== - Removed property/method New method - ====================================== =========================================== - :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()` - :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()` - :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()` - :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()` - :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()` - ====================================== =========================================== -* As a result of deprecating :meth:`iris.cube.Cube.add_history` and removing the - automatic appending of history by operations such as cube arithmetic, - collapsing, and aggregating, the signatures of a number of functions within - :mod:`iris.analysis.maths` have been modified along with that of - :class:`iris.analysis.Aggregator` and :class:`iris.analysis.WeightedAggregator`. -* The experimental ABF and ABL functionality has now been promoted to - core functionality in :mod:`iris.fileformats.abf`. -* The following :mod:`iris.coord_categorisation` deprecated functions have been - removed. +* :ref:`A new PEAK aggregator providing spline interpolation`. 
- =============================================================== ======================================================= - Removed function New function - =============================================================== ======================================================= - :func:`~iris.coord_categorisation.add_custom_season` :func:`~iris.coord_categorisation.add_season` - :func:`~iris.coord_categorisation.add_custom_season_number` :func:`~iris.coord_categorisation.add_season_number` - :func:`~iris.coord_categorisation.add_custom_season_year` :func:`~iris.coord_categorisation.add_season_year` - :func:`~iris.coord_categorisation.add_custom_season_membership` :func:`~iris.coord_categorisation.add_season_membership` - :func:`~iris.coord_categorisation.add_month_shortname` :func:`~iris.coord_categorisation.add_month` - :func:`~iris.coord_categorisation.add_weekday_shortname` :func:`~iris.coord_categorisation.add_weekday` - :func:`~iris.coord_categorisation.add_season_month_initials` :func:`~iris.coord_categorisation.add_season` - =============================================================== ======================================================= -* When a cube is loaded from PP or GRIB and it has both time and forecast period - coordinates, and the time coordinate has bounds, the forecast period coordinate - will now also have bounds. These bounds will be aligned with the bounds of the - time coordinate taking into account the forecast reference time. Also, - the forecast period point will now be aligned with the time point. +* A new utility function :func:`iris.util.broadcast_to_shape`. -Deprecations -============ -* :meth:`iris.cube.Cube.add_history` has been deprecated in favour - of users modifying/creating the history metadata directly. This is - because the automatic behaviour did not deliver a sufficiently complete, - auditable history and often prevented the merging of cubes. -* :func:`iris.util.broadcast_weights` has been deprecated and replaced - by the new utility function :func:`iris.util.broadcast_to_shape`. -* Callback mechanism `iris.run_callback` has had its deprecation of return - values revoked. The callback can now return cube instances as well as - inplace changes to the cube. +* A new utility function :func:`iris.util.as_compatible_shape`. -New Contributors -================ -Congratulations and thank you to `felicityguest `_, `jkettleb `_, -`kwilliams-mo `_ and `shoyer `_ who all made their first contribution -to Iris! +* Iris tests can now be run on systems where directory write permissions + previously did not allow it. This is achieved by writing to the current + working directory in such cases. +* Support for 365 day calendar PP fields. ----- +* Added phenomenon translation between cf and grib2 for wind (from) direction. +* PP files now retain lbfc value on save, derived from the stash attribute. .. _caching: @@ -249,7 +179,8 @@ consuming processing, or to reap the benefit of fast-loading a pickled cube. .. _rms: The RMS aggregator supports weights -=================================== +----------------------------------- + The :data:`iris.analysis.RMS` aggregator has been extended to allow the use of weights using the new keyword argument :data:`weights`. @@ -264,7 +195,8 @@ For example, an RMS weighted cube collapse is performed as follows: .. 
_equalise: Equalise cube attributes -======================== +------------------------ + To assist with :class:`iris.cube.Cube` merging, the new experimental in-place function :func:`iris.experimental.equalise_cubes.equalise_attributes` ensures that a sequence of cubes contains a common set of :data:`iris.cube.Cube.attributes`. @@ -276,7 +208,8 @@ have the same attributes. .. _tolerance: Masking a collapsed result by missing-data tolerance -==================================================== +---------------------------------------------------- + The result from collapsing masked cube data may now be completely masked by providing a :data:`mdtol` missing-data tolerance keyword to :meth:`iris.cube.Cube.collapsed`. @@ -289,7 +222,8 @@ less than or equal to the provided tolerance. .. _promote: Promote a scalar coordinate -=========================== +--------------------------- + The new utility function :func:`iris.util.new_axis` creates a new cube with a new leading dimension of size unity. If a scalar coordinate is provided, then the scalar coordinate is promoted to be the dimension coordinate for the new @@ -301,7 +235,8 @@ Note that, this function will load the data payload of the cube. .. _peak: A new PEAK aggregator providing spline interpolation -==================================================== +---------------------------------------------------- + The new :data:`iris.analysis.PEAK` aggregator calculates the global peak value from a spline interpolation of the :class:`iris.cube.Cube` data payload along a nominated coordinate axis. @@ -312,3 +247,138 @@ For example, to calculate the peak time: from iris.analysis import PEAK collapsed_cube = cube.collapsed('time', PEAK) + + +Bugs fixed +========== + +* :meth:`iris.cube.Cube.rolling_window` has been extended to support masked + arrays. + +* :meth:`iris.cube.Cube.collapsed` now handles string coordinates. + +* Default LBUSER(2) to -99 for Fieldsfile and PP saving. + +* :func:`iris.util.monotonic` returns the correct direction. + +* File loaders correctly parse filenames containing colons. + +* ABF loader now correctly loads the ABF data payload once. + +* Support for 1D array :data:`iris.cube.cube.attributes`. + +* GRIB bounded level saving fix. + +* :func:`iris.analysis.cartography.project` now associates a coordinate system + with the resulting target cube, where applicable. + +* :func:`iris.util.array_equal` now correctly ignores any mask if present, + matching the behaviour of :func:`numpy.array_equal` except with string array + support. + +* :func:`iris.analysis.interpolate.linear` now retains a mask in the resulting + cube. + +* :meth:`iris.coords.DimCoord.from_regular` now correctly returns a coordinate + which will always be regular as indicated by :func:`~iris.util.is_regular`. + +* :func:`iris.util.rolling_window` handling of masked arrays (degenerate + masks) fixed. + +* Exception no longer raised for any ellipsoid definition in nimrod loading. + + +Incompatible changes +==================== + +* The experimental 'concatenate' function is now a method of a + :class:`iris.cube.CubeList`, see :meth:`iris.cube.CubeList.concatenate`. The + functionality is unchanged. + +* :meth:`iris.cube.Cube.extract_by_trajectory()` has been removed. + Instead, use :func:`iris.analysis.trajectory.interpolate()`. + +* :func:`iris.load_strict()` has been removed. + Instead, use :func:`iris.load_cube()` and :func:`iris.load_cubes()`. + +* :meth:`iris.coords.Coord.cos()` and :meth:`iris.coords.Coord.sin()` + have been removed. 
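+
+  One possible replacement (a sketch, assuming the coordinate's points are
+  already in radians)::
+
+      import numpy as np
+
+      cos_points = np.cos(coord.points)
+      sin_points = np.sin(coord.points)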
+ +* :meth:`iris.coords.Coord.unit_converted()` has been removed. + Instead, make a copy of the coordinate using + :meth:`iris.coords.Coord.copy()` and then call the + :meth:`iris.coords.Coord.convert_units()` method of the new + coordinate. + +* Iteration over a :class:`~iris.cube.Cube` has been removed. Instead, + use :meth:`iris.cube.Cube.slices()`. + +* The following :class:`~iris.unit.Unit` deprecated methods/properties have + been removed. + + ====================================== =========================================== + Removed property/method New method + ====================================== =========================================== + :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()` + :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()` + :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()` + :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()` + :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()` + ====================================== =========================================== + +* As a result of deprecating :meth:`iris.cube.Cube.add_history` and removing the + automatic appending of history by operations such as cube arithmetic, + collapsing, and aggregating, the signatures of a number of functions within + :mod:`iris.analysis.maths` have been modified along with that of + :class:`iris.analysis.Aggregator` and + :class:`iris.analysis.WeightedAggregator`. + +* The experimental ABF and ABL functionality has now been promoted to + core functionality in :mod:`iris.fileformats.abf`. + +* The following :mod:`iris.coord_categorisation` deprecated functions have been + removed. + + =============================================================== ======================================================= + Removed function New function + =============================================================== ======================================================= + :func:`~iris.coord_categorisation.add_custom_season` :func:`~iris.coord_categorisation.add_season` + :func:`~iris.coord_categorisation.add_custom_season_number` :func:`~iris.coord_categorisation.add_season_number` + :func:`~iris.coord_categorisation.add_custom_season_year` :func:`~iris.coord_categorisation.add_season_year` + :func:`~iris.coord_categorisation.add_custom_season_membership` :func:`~iris.coord_categorisation.add_season_membership` + :func:`~iris.coord_categorisation.add_month_shortname` :func:`~iris.coord_categorisation.add_month` + :func:`~iris.coord_categorisation.add_weekday_shortname` :func:`~iris.coord_categorisation.add_weekday` + :func:`~iris.coord_categorisation.add_season_month_initials` :func:`~iris.coord_categorisation.add_season` + =============================================================== ======================================================= + +* When a cube is loaded from PP or GRIB and it has both time and forecast period + coordinates, and the time coordinate has bounds, the forecast period + coordinate will now also have bounds. These bounds will be aligned with the + bounds of the time coordinate taking into account the forecast reference + time. Also, the forecast period point will now be aligned with the time point. + + +Deprecations +============ + +* :meth:`iris.cube.Cube.add_history` has been deprecated in favour + of users modifying/creating the history metadata directly. 
This is + because the automatic behaviour did not deliver a sufficiently complete, + auditable history and often prevented the merging of cubes. + +* :func:`iris.util.broadcast_weights` has been deprecated and replaced + by the new utility function :func:`iris.util.broadcast_to_shape`. + +* Callback mechanism `iris.run_callback` has had its deprecation of return + values revoked. The callback can now return cube instances as well as + inplace changes to the cube. + + +New Contributors +================ +Congratulations and thank you to +`felicityguest `_, +`jkettleb `_, +`kwilliams-mo `_ and +`shoyer `_ who all made their first contribution +to Iris! diff --git a/docs/iris/src/whatsnew/1.7.rst b/docs/iris/src/whatsnew/1.7.rst index 2f3a52fbb9..e60c1083d9 100644 --- a/docs/iris/src/whatsnew/1.7.rst +++ b/docs/iris/src/whatsnew/1.7.rst @@ -1,22 +1,26 @@ -What's new in Iris 1.7 -********************** +v1.7 (04 Jul 2014) +******************** -This document explains the new/changed features of Iris in version 1.7. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -:Release: 1.7.4 -:Date: 15th April 2015 -Iris 1.7 features -================= +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== .. _showcase: .. admonition:: Showcase: Iris is making use of Biggus - Iris is now making extensive use of `Biggus `_ - for virtual arrays and lazy array evaluation. In practice this means that analyses - of cubes with data bigger than the available system memory are now possible. + Iris is now making extensive use of + `Biggus `_ for virtual arrays and lazy + array evaluation. In practice this means that analyses of cubes with data + bigger than the available system memory are now possible. Other than the improved functionality the changes are mostly transparent; for example, before the introduction of biggus, MemoryErrors @@ -33,20 +37,20 @@ Iris 1.7 features >>> print(type(result)) - Memory is still a limiting factor if ever the data is desired as a NumPy array - (e.g. via :data:`cube.data `), but additional methods have - been added to the Cube to support querying and subsequently accessing the "lazy" - data form (see :meth:`~iris.cube.Cube.has_lazy_data` and - :meth:`~iris.cube.Cube.lazy_data`). + Memory is still a limiting factor if ever the data is desired as a NumPy + array (e.g. via :data:`cube.data `), but additional + methods have been added to the Cube to support querying and subsequently + accessing the "lazy" data form (see :meth:`~iris.cube.Cube.has_lazy_data` + and :meth:`~iris.cube.Cube.lazy_data`). .. admonition:: Showcase: New interpolation and regridding API - New interpolation and regridding interfaces have been added which simplify and - extend the existing functionality. + New interpolation and regridding interfaces have been added which simplify + and extend the existing functionality. The interfaces are exposed on the cube in the form of the - :meth:`~iris.cube.Cube.interpolate` and :meth:`~iris.cube.Cube.regrid` methods. - Conceptually the signatures of the methods are:: + :meth:`~iris.cube.Cube.interpolate` and :meth:`~iris.cube.Cube.regrid` + methods. 
Conceptually the signatures of the methods are:: interpolated_cube = cube.interpolate(interpolation_points, interpolation_scheme) @@ -55,16 +59,17 @@ Iris 1.7 features regridded_cube = cube.regrid(target_grid_cube, regridding_scheme) Whilst not all schemes have been migrated to the new interface, - :class:`iris.analysis.Linear` defines both linear interpolation and regridding, - and :class:`iris.analysis.AreaWeighted` defines an area weighted regridding - scheme. + :class:`iris.analysis.Linear` defines both linear interpolation and + regridding, and :class:`iris.analysis.AreaWeighted` defines an area weighted + regridding scheme. .. admonition:: Showcase: Merge and concatenate reporting Merge reporting is designed as an aid to the merge processes. Should merging - a :class:`~iris.cube.CubeList` fail, merge reporting means that a descriptive - error will be raised that details the differences between the cubes in the - :class:`~iris.cube.CubeList` that prevented the merge from being successful. + a :class:`~iris.cube.CubeList` fail, merge reporting means that a + descriptive error will be raised that details the differences between the + cubes in the :class:`~iris.cube.CubeList` that prevented the merge from + being successful. A new :class:`~iris.cube.CubeList` method, called :meth:`~iris.cube.CubeList.merge_cube`, has been introduced. Calling it on a @@ -83,8 +88,8 @@ Iris 1.7 features iris.exceptions.MergeError: failed to merge into a single cube. cube.attributes keys differ: 'foo' - The naming of this new method mirrors that of Iris load functions, where - one would always expect a :class:`~iris.cube.CubeList` from :func:`iris.load` + The naming of this new method mirrors that of Iris load functions, where one + would always expect a :class:`~iris.cube.CubeList` from :func:`iris.load` and a :class:`~iris.cube.Cube` from :func:`iris.load_cube`. Concatenate reporting is the equivalent process for concatenating a @@ -101,10 +106,10 @@ Iris 1.7 features However, the additional richness of Iris coordinate meta-data provides an enhanced capability beyond the basic broadcasting behaviour of NumPy. - This means that when performing cube arithmetic, the dimensionality and shape of - cubes no longer need to match. For example, if the dimensionality of a cube is - reduced by collapsing, then the result can be used to subtract from the original - cube to calculate an anomaly:: + This means that when performing cube arithmetic, the dimensionality and + shape of cubes no longer need to match. For example, if the dimensionality + of a cube is reduced by collapsing, then the result can be used to subtract + from the original cube to calculate an anomaly:: >>> time_mean = original_cube.collapsed('time', iris.analysis.MEAN) >>> mean_anomaly = original_cube - time_mean @@ -117,132 +122,218 @@ Iris 1.7 features >>> zero_cube = original_cube - similar_cube * Merge reporting that raises a descriptive error if the merge process fails. -* Linear interpolation and regridding now make use of SciPy's RegularGridInterpolator - for much faster linear interpolation. + +* Linear interpolation and regridding now make use of SciPy's + RegularGridInterpolator for much faster linear interpolation. + * NAME file loading now handles the "no time averaging" column and translates - height/altitude above ground/sea-level columns into appropriate coordinate metadata. -* The NetCDF saver has been extended to allow saving of cubes with hybrid pressure - auxiliary factories. -* PP/FF loading supports LBLEV of 9999. 
-* Extended GRIB1 loading to support data on hybrid pressure levels. -* :func:`iris.coord_categorisation.add_day_of_year` can be used to add categorised - day of year coordinates based on time coordinates with non-Gregorian calendars. + height/altitude above ground/sea-level columns into appropriate coordinate + metadata. + +* The NetCDF saver has been extended to allow saving of cubes with hybrid + pressure auxiliary factories. + +* PP/FF loading supports LBLEV of 9999. + +* Extended GRIB1 loading to support data on hybrid pressure levels. + +* :func:`iris.coord_categorisation.add_day_of_year` can be used to add + categorised day of year coordinates based on time coordinates with + non-Gregorian calendars. + * Support for loading data on reduced grids from GRIB files in raw form without automatically interpolating to a regular grid. + * The coordinate systems :class:`iris.coord_systems.Orthographic` and - :class:`iris.coord_systems.VerticalPerspective` (for imagery from geostationary - satellites) have been added. -* Extended NetCDF loading to support the "ocean sigma over z" auxiliary coordinate + :class:`iris.coord_systems.VerticalPerspective` (for imagery from + geostationary satellites) have been added. + +* Extended NetCDF loading to support the "ocean sigma over z" auxiliary + coordinate factory. + * Support added for loading CF-NetCDF data with bounds arrays that are missing a vertex dimension. + * :meth:`iris.cube.Cube.rolling_window` can now be used with string-based :class:`iris.coords.AuxCoord` instances. + * Loading of PP and FF files has been optimised through deferring creation of PPField attributes. + * Automatic association of a coordinate's CF formula terms variable with the data variable associated with that coordinate. -* PP loading translates cross-section height into a dimensional auxiliary coordinate. -* String auxiliary coordinates can now be plotted with the Iris plotting wrappers. -* :func:`iris.analysis.geometry.geometry_area_weights` now allows for the calculation of - normalized cell weights. -* Many new translations between the CF spec and STASH codes or GRIB2 parameter codes. -* PP save rules add the data's UM Version to the attributes of the saved file - when appropriate. + +* PP loading translates cross-section height into a dimensional auxiliary + coordinate. + +* String auxiliary coordinates can now be plotted with the Iris + plotting wrappers. + +* :func:`iris.analysis.geometry.geometry_area_weights` now + allows for the calculation of normalized cell weights. + +* Many new translations between the CF spec and STASH codes or GRIB2 parameter + codes. + +* PP save rules add the data's UM Version to the attributes of the saved + file when appropriate. + * NetCDF reference surface variable promotion available through the :class:`iris.FUTURE` mechanism. -* A speed improvement in calculation of :func:`iris.analysis.geometry.geometry_area_weights`. -* The mdtol keyword was added to area-weighted regridding to allow control of the - tolerance for missing data. For a further description of this concept, see + +* A speed improvement in calculation of + :func:`iris.analysis.geometry.geometry_area_weights`. + +* The mdtol keyword was added to area-weighted regridding to allow control of + the tolerance for missing data. For a further description of this concept, see :class:`iris.analysis.AreaWeighted`. + * Handling for patching of the CF conventions global attribute via a defined cf_patch_conventions function. 
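+
+  A minimal sketch of such a hook; its placement in ``iris/site_config.py`` and
+  the exact signature shown here are assumptions, not a confirmed API::
+
+      def cf_patch_conventions(conventions):
+          # Hypothetical example: advertise an additional local profile.
+          return conventions + ', local-profile-1.0'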
-* Deferred GRIB data loading has been introduced for reduced memory consumption when - loading GRIB files. + +* Deferred GRIB data loading has been introduced for reduced memory consumption + when loading GRIB files. + * Concatenate reporting that raises a descriptive error if the concatenation process fails. + * A speed improvement when loading PP or FF data and constraining on STASH code. + Bugs fixed ========== + * Data containing more than one reference cube for constructing hybrid height coordinates can now be loaded. + * Removed cause of increased margin of error when interpolating. + * Changed floating-point precision used when wrapping points for interpolation. + * Mappables that can be used to generate colorbars are now returned by Iris plotting wrappers. -* NetCDF load ignores over-specified formula terms on bounded dimensionless vertical - coordinates. + +* NetCDF load ignores over-specified formula terms on bounded dimensionless + vertical coordinates. + * Auxiliary coordinate factory loading now correctly interprets formula term - varibles for "atmosphere hybrid sigma pressure" coordinate data. + variables for "atmosphere hybrid sigma pressure" coordinate data. + * Corrected comparison of NumPy NaN values in cube merge process. -* Fixes for :meth:`iris.cube.Cube.intersection` to correct calculating the intersection - of a cube with split bounds, handling of circular coordinates, handling of - monotonically descending bounded coordinats and for finding a wrapped two-point - result and longitude tolerances. -* A bug affecting :meth:`iris.cube.Cube.extract` and :meth:`iris.cube.CubeList.extract` - that led to unexpected behaviour when operating on scalar cubes has been fixed. -* Aggregate_by may now be passed single-value coordinates. -* Making a copy of a :class:`iris.coords.DimCoord` no longer results in the writeable - flag on the copied points and bounds arrays being set to True. -* Can now save to PP a cube that has vertical levels but no orography. + +* Fixes for :meth:`iris.cube.Cube.intersection` to correct calculating the + intersection of a cube with split bounds, handling of circular coordinates, + handling of monotonically descending bounded coordinates and for finding a + wrapped two-point result and longitude tolerances. + +* A bug affecting :meth:`iris.cube.Cube.extract` and + :meth:`iris.cube.CubeList.extract` that led to unexpected behaviour when + operating on scalar cubes has been fixed. + +* Aggregate_by may now be passed single-value coordinates. + +* Making a copy of a :class:`iris.coords.DimCoord` no longer results in the + writeable flag on the copied points and bounds arrays being set to True. + +* Can now save to PP a cube that has vertical levels but no orography. + * Fix a bug causing surface altitude and surface pressure fields to not appear in cubes loaded with a STASH constraint. -* Fixed support for :class:`iris.fileformats.pp.STASH` objects in STASH constraints. -* A fix to avoid a problem where cube attribute names clash with NetCDF reserved attribute names. -* A fix to allow :meth:`iris.cube.CubeList.concatenate` to deal with descending coordinate order. -* Add missing NetCDF attribute `varname` when constructing a new :class:`iris.coords.AuxCoord`. -* The datatype of time arrays converted with :func:`iris.util.unify_time_units` is now preserved. -Bugs fixed in v1.7.3 +* Fixed support for :class:`iris.fileformats.pp.STASH` objects in STASH + constraints. 
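+
+  For example (a sketch; ``filename`` stands for a suitable PP file)::
+
+      import iris
+      from iris.fileformats.pp import STASH
+
+      constraint = iris.AttributeConstraint(STASH=STASH(1, 3, 236))
+      air_temp = iris.load_cube(filename, constraint)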
+
+* A fix to avoid a problem where cube attribute names clash with
+  NetCDF reserved attribute names.
+
+* A fix to allow :meth:`iris.cube.CubeList.concatenate` to deal with descending
+  coordinate order.
+
+* Add missing NetCDF attribute `varname` when constructing a new
+  :class:`iris.coords.AuxCoord`.
+
+* The datatype of time arrays converted with
+  :func:`iris.util.unify_time_units` is now preserved.
+
+
+v1.7.3 (16 Dec 2014)
 ^^^^^^^^^^^^^^^^^^^^
-* Scalar dimension coordinates can now be concatenated with :meth:`iris.cube.CubeList.concatenate`.
-* Arbitrary names can no longer be set for elements of a :class:`iris.fileformats.pp.SplittableInt`.
-* Cubes that contain a pseudo-level coordinate can now be saved to PP.
-* Fixed a bug in the FieldsFile loader that prevented it always loading all available fields.
-Bugs fixed in v1.7.4
+* Scalar dimension coordinates can now be concatenated with
+  :meth:`iris.cube.CubeList.concatenate`.
+
+* Arbitrary names can no longer be set
+  for elements of a :class:`iris.fileformats.pp.SplittableInt`.
+
+* Cubes that contain a pseudo-level coordinate can now be saved to PP.
+
+* Fixed a bug in the FieldsFile loader that prevented it always loading all
+  available fields.
+
+
+v1.7.4 (15 Apr 2015)
 ^^^^^^^^^^^^^^^^^^^^
+
 * :meth:`Coord.guess_bounds` can now deal with circular coordinates.
+
 * :meth:`Coord.nearest_neighbour_index` can now work with descending bounds.
+
 * Passing `weights` to :meth:`Cube.rolling_window` no longer prevents other
   keyword arguments from being passed to the aggregator.
+
 * Several minor fixes to allow use of Iris on Windows.
+
 * Made use of the new standard_parallels keyword in Cartopy's LambertConformal
   projection (Cartopy v0.12). Older versions of Iris will not be able to create
   LambertConformal coordinate systems with Cartopy >= 0.12.

+
 Incompatible changes
 ====================
+
 * Saving a cube with a STASH attribute to NetCDF now produces a variable
   with an attribute of "um_stash_source" rather than "ukmo__um_stash_source".
-* Cubes saved to NetCDF with a coordinate system referencing a spherical ellipsoid
-  now result in the grid mapping variable containing only the "earth_radius" attribute,
-  rather than the "semi_major_axis" and "semi_minor_axis".
-* Collapsing a cube over all of its dimensions now results in a scalar cube rather
-  than a 1d cube.
+
+* Cubes saved to NetCDF with a coordinate system referencing a spherical
+  ellipsoid now result in the grid mapping variable containing only the
+  "earth_radius" attribute, rather than the "semi_major_axis" and
+  "semi_minor_axis".
+
+* Collapsing a cube over all of its dimensions now results in a scalar cube
+  rather than a 1d cube.

+
 Deprecations
 ============
+
 * :func:`iris.util.ensure_array` has been deprecated.
+
 * Deprecated the :func:`iris.fileformats.pp.reset_load_rules` and
   :func:`iris.fileformats.grib.reset_load_rules` functions.
+
 * Matplotlib is no longer a core Iris dependency.
-
-Documentation Changes
-=====================
+
+
+Documentation
+=============
+
 * New sections on :ref:`cube broadcasting ` and
   :doc:`regridding and interpolation ` have been
   added to the :doc:`user guide `.
+
 * An example demonstrating custom log-scale colouring has been added.
-  See :ref:`General-anomaly_log_colouring`.
+  See :ref:`sphx_glr_generated_gallery_general_plot_anomaly_log_colouring.py`.
+
 * An example demonstrating the creation of a custom
   :class:`iris.analysis.Aggregator` has been added.
-  See :ref:`General-custom_aggregation`.
+ See :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py`. + * An example of reprojecting data from 2D auxiliary spatial coordinates - (such as that from the ORCA grid) has been added. See :ref:`General-orca_projection`. -* A clarification of the behaviour of :func:`iris.analysis.calculus.differentiate`. -* A new :doc:`"whitepapers" ` section has been added to the documentation along - with the addition of a paper providing an :doc:`overview of the load process for UM-like - fileformats (e.g. PP and Fieldsfile) `. + (such as that from the ORCA grid) has been added. See + :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py`. + +* A clarification of the behaviour of + :func:`iris.analysis.calculus.differentiate`. +* A new :doc:`"Technical Papers" ` section has been added to + the documentation along with the addition of a paper providing an + :doc:`overview of the load process for UM-like fileformats (e.g. PP and Fieldsfile) `. diff --git a/docs/iris/src/whatsnew/1.8.rst b/docs/iris/src/whatsnew/1.8.rst index c763411ed8..17432d7267 100644 --- a/docs/iris/src/whatsnew/1.8.rst +++ b/docs/iris/src/whatsnew/1.8.rst @@ -1,14 +1,17 @@ -What's new in Iris 1.8 -********************** +v1.8 (14 Apr 2015) +******************** -:Release: 1.8.1 -:Date: 3rd June 2015 - -This document explains the new/changed features of Iris in version 1.8. +This document explains the changes made to Iris for this release (:doc:`View all changes `.) -Iris 1.8 features -================= + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== .. _showcase: @@ -38,14 +41,17 @@ Iris 1.8 features .. admonition:: Showcase: Slices over a coordinate - You can slice over one or more dimensions of a cube using :meth:`iris.cube.Cube.slices_over`. - This provides similar functionality to :meth:`~iris.cube.Cube.slices` but with - almost the opposite outcome. + You can slice over one or more dimensions of a cube using + :meth:`iris.cube.Cube.slices_over`. + This provides similar functionality to :meth:`~iris.cube.Cube.slices` + but with almost the opposite outcome. - Using :meth:`~iris.cube.Cube.slices` to slice a cube on a selected dimension returns - all possible slices of the cube with the selected dimension retaining its dimensionality. - Using :meth:`~iris.cube.Cube.slices_over` to slice a cube on a selected - dimension returns all possible slices of the cube over the selected dimension. + Using :meth:`~iris.cube.Cube.slices` to slice a cube on a selected + dimension returns all possible slices of the cube with the selected + dimension retaining its dimensionality. Using + :meth:`~iris.cube.Cube.slices_over` to slice a cube on a selected + dimension returns all possible slices of the cube over the selected + dimension. To demonstrate this:: @@ -60,42 +66,65 @@ Iris 1.8 features air_potential_temperature / (K) (model_level_number: 10; grid_latitude: 83; grid_longitude: 83) -* :func:`iris.cube.CubeList.concatenate` now works with `biggus `_ arrays and so +* :func:`iris.cube.CubeList.concatenate` now works with + `biggus `_ arrays and so now supports concatenation of cubes with deferred data. + * Improvements to NetCDF saving through using biggus: * A cube's lazy data payload will still be lazy after saving; the data will not be loaded into memory by the save operation. + * Cubes with data payloads larger than system memory can now be saved to NetCDF through biggus streaming the data to disk. 
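+
+  For example (a sketch, assuming ``cube`` was loaded lazily from disk)::
+
+      import iris
+
+      print(cube.has_lazy_data())   # True
+      iris.save(cube, 'streamed.nc')
+      print(cube.has_lazy_data())   # still True; data was streamed, not loaded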
-* :func:`iris.util.demote_dim_coord_to_aux_coord` and :func:`iris.util.promote_aux_coord_to_dim_coord` +* :func:`iris.util.demote_dim_coord_to_aux_coord` and + :func:`iris.util.promote_aux_coord_to_dim_coord` allow a coordinate to be easily demoted or promoted within a cube. -* :func:`iris.util.squeeze` removes all length 1 dimensions from a cube, and demotes - any associated squeeze dimension :class:`~iris.coords.DimCoord` to be a scalar coordinate. -* :meth:`iris.cube.Cube.slices_over`, which returns an iterator of all sub-cubes along a given - coordinate or dimension index. + +* :func:`iris.util.squeeze` removes all length 1 dimensions from a cube, and + demotes any associated squeeze dimension :class:`~iris.coords.DimCoord` to be + a scalar coordinate. + +* :meth:`iris.cube.Cube.slices_over`, which returns an iterator of all + sub-cubes along a given coordinate or dimension index. + * :meth:`iris.cube.Cube.interpolate` now accepts datetime.datetime and netcdftime.datetime instances for date or time coordinates. -* Many new and updated translations between CF spec and STASH codes or GRIB2 parameter - codes. -* PP/FF loader creates a height coordinate at 1.5m or 10m for certain relevant stash codes. -* Lazy aggregator support for the :class:`standard deviation ` - aggregator has been added. -* A speed improvement in calculation of :func:`iris.analysis.cartography.area_weights`. -* Experimental support for unstructured grids has been added with :func:`iris.experimental.ugrid`. - This has been implemented using `UGRID `_. -* :meth:`iris.cube.CubeList.extract_overlapping` supports extraction of cubes over - regions where common coordinates overlap, over multiple coordinates. + +* Many new and updated translations between CF spec and STASH codes or GRIB2 + parameter codes. + +* PP/FF loader creates a height coordinate at 1.5m or 10m for certain relevant + stash codes. + +* Lazy aggregator support for the + :class:`standard deviation ` aggregator has been added. + +* A speed improvement in calculation of + :func:`iris.analysis.cartography.area_weights`. + +* Experimental support for unstructured grids has been added with + :func:`iris.experimental.ugrid`. This has been implemented using + `UGRID `_. + +* :meth:`iris.cube.CubeList.extract_overlapping` supports extraction of cubes + over regions where common coordinates overlap, over multiple coordinates. + * Warnings raised due to invalid units in loaded data have been suppressed. -* Experimental low-level read and write access for FieldsFile variants is now supported - via :class:`iris.experimental.um.FieldsFileVariant`. + +* Experimental low-level read and write access for FieldsFile variants is now + supported via :class:`iris.experimental.um.FieldsFileVariant`. + * PP loader will return cubes for all fields prior to a field with a problematic header before raising an exception. -* NetCDF loader skips invalid global attributes, raising a warning rather than raising an - exception. + +* NetCDF loader skips invalid global attributes, raising a warning rather than + raising an exception. + * A warning is now raised rather than an exception when constructing an :class:`~iris.aux_factory.AuxCoordFactory` fails. + * Supported :class:`aux coordinate factories ` have been extended to include: @@ -104,78 +133,104 @@ Iris 1.8 features * ``ocean s coordinate, generic form 1``, and * ``ocean s coordinate, generic form 2``. -* :meth:`iris.cube.Cube.intersection` now supports taking a points-only intersection. 
- Any bounds on intersected coordinates are ignored but retained. +* :meth:`iris.cube.Cube.intersection` now supports taking a points-only + intersection. Any bounds on intersected coordinates are ignored but retained. + * The FF loader's known handled grids now includes ``Grid 21``. -* A :class:`nearest neighbour ` scheme is now provided for - :meth:`iris.cube.Cube.interpolate` and :meth:`iris.cube.Cube.regrid`. -* :func:`iris.analysis.cartography.rotate_winds` supports transformation of wind vectors - to a different coordinate system. + +* A :class:`nearest neighbour ` scheme is now provided + for :meth:`iris.cube.Cube.interpolate` and :meth:`iris.cube.Cube.regrid`. + +* :func:`iris.analysis.cartography.rotate_winds` supports transformation of + wind vectors to a different coordinate system. + * NumPy universal functions can now be applied to cubes using :func:`iris.analysis.maths.apply_ufunc`. + * Generic functions can be applied to :class:`~iris.cube.Cube` instances using :class:`iris.analysis.maths.IFunc`. -* The :class:`iris.analysis.Linear` scheme now supports regridding as well as interpolation. - This enables :meth:`iris.cube.Cube.regrid` to perform bilinear regridding, which now - replaces the experimental routine "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid". + +* The :class:`iris.analysis.Linear` scheme now supports regridding as well as + interpolation. This enables :meth:`iris.cube.Cube.regrid` to perform bilinear + regridding, which now replaces the experimental routine + "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid". + Bugs fixed ========== -1.8.0 ------- * Fix in netCDF loader to correctly determine whether the longitude coordinate (including scalar coordinates) is circular. -* :meth:`iris.cube.Cube.intersection` now supports bounds that extend slightly beyond 360 - degrees. -* Lateral Boundary Condition (LBC) type FieldFiles are now handled correctly by the FF loader. -* Making a copy of a scalar cube with no data now correctly copies the data array. -* Height coordinates in NAME trajectory output files have been changed to match other - NAME output file formats. + +* :meth:`iris.cube.Cube.intersection` now supports bounds that extend slightly + beyond 360 degrees. + +* Lateral Boundary Condition (LBC) type FieldFiles are now handled correctly by + the FF loader. + +* Making a copy of a scalar cube with no data now correctly copies the data + array. + +* Height coordinates in NAME trajectory output files have been changed to match + other NAME output file formats. + * Fixed datatype when loading an ``integer_constants`` array from a FieldsFile. + * FF/PP loader adds appropriate cell methods for ``lbtim.ib = 3`` intervals. + * An exception is raised if the units of the latitude and longitude coordinates of the cube passed into :func:`iris.analysis.cartography.area_weights` are not convertible to radians. + * GRIB1 loader now creates a time coordinate for a time range indicator of 2. + * NetCDF loader now loads units that are empty strings as dimensionless. -1.8.1 ------- -* The PP loader now carefully handles floating point errors in date time conversions to hours. -* The handling fill values for lazy data loaded from NetCDF files is altered, such that the - _FillValue set in the file is preserved through lazy operations. -* The risk that cube intersections could return incorrect results due to floating point - tolerances is reduced. 
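A minimal sketch of the :func:`iris.analysis.maths.apply_ufunc` support listed in the features above; the sample file, the result name, and the ``new_unit``/``new_name`` keywords are illustrative assumptions rather than a definitive usage::

    import numpy as np
    import iris
    from iris.analysis.maths import apply_ufunc

    cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))
    # Apply a NumPy universal function element-wise; a new cube is returned.
    result = apply_ufunc(
        np.log, cube, new_unit="1", new_name="log_of_air_temperature"
    )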
-* The new GRIB2 loading code is altered to enable the loading of various data representation
-  templates; the data value unpacking is handled by the GRIB API.
-* Saving cube collections to NetCDF, where multiple similar aux-factories exist within the cubes,
-  is now carefully handled such that extra file variables are created where required in some cases.
-
-1.8.2
------
-* A fix to prevent the error: *AttributeError: 'module' object has no attribute 'date2num'*.
-  This was caused by the function :func:`netcdftime.date2num` being removed from the netCDF4
-  package in recent versions.
+
+v1.8.1 (03 Jun 2015)
+--------------------
+
+* The PP loader now carefully handles floating point errors in date time
+  conversions to hours.
+
+* The handling of fill values for lazy data loaded from NetCDF files is altered,
+  such that the _FillValue set in the file is preserved through lazy operations.
+
+* The risk that cube intersections could return incorrect results due to
+  floating point tolerances is reduced.
+
+* The new GRIB2 loading code is altered to enable the loading of various data
+  representation templates; the data value unpacking is handled by the GRIB API.
+
+* Saving cube collections to NetCDF, where multiple similar aux-factories exist
+  within the cubes, is now carefully handled such that extra file variables are
+  created where required in some cases.
+
 Deprecations
 ============
+
 * The original GRIB loader has been deprecated and replaced with a new
   template-based GRIB loader.
+
 * Deprecated default NetCDF save behaviour of assigning the outermost
   dimension to be unlimited. Switch to the new behaviour with no auto
   assignment by setting :data:`iris.FUTURE.netcdf_no_unlimited` to True.
+
 * The former experimental method
-  "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid" has been removed, as
-  :class:`iris.analysis.Linear` now includes this functionality.
-
-Documentation Changes
-=====================
-* A chapter on :doc:`merge and concatenate ` has been
-  added to the :doc:`user guide `.
-* A section on installing Iris using `conda `_ has been
-  added to the :doc:`install guide `.
+  "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid" has been
+  removed, as :class:`iris.analysis.Linear` now includes this functionality.
+
+
+Documentation
+=============
+
+* A chapter on :doc:`merge and concatenate ` has
+  been added to the :doc:`user guide `.
+
+* A section on installing Iris using `conda `_ has
+  been added to the :doc:`install guide `.
+
 * Updates to the chapter on
   :doc:`regridding and interpolation ` have been added to the
   :doc:`user guide `.
-
diff --git a/docs/iris/src/whatsnew/1.9.rst b/docs/iris/src/whatsnew/1.9.rst
index 7a4848b434..77d03b5de3 100644
--- a/docs/iris/src/whatsnew/1.9.rst
+++ b/docs/iris/src/whatsnew/1.9.rst
@@ -1,32 +1,48 @@
-What's New in Iris 1.9
-**********************
+v1.9 (10 Dec 2015)
+********************
 
-:Release: 1.9.2
-:Date: 28th January 2016
-
-This document explains the new/changed features of Iris in version 1.9
+This document explains the changes made to Iris for this release
 (:doc:`View all changes `.)
 
-Iris 1.9 Features
-=================
-* Support for running on Python 3.4 has been added to the whole code base. Some features which
-  depend on external libraries will not be available until they also support Python 3, namely:
+
+.. contents:: Skip to section:
+   :local:
+   :depth: 3
+
+
+Features
+========
+
+* Support for running on Python 3.4 has been added to the whole code base.
+ Some features which depend on external libraries will not be available until + they also support Python 3, namely: * gribapi does not yet provide a Python 3 interface -* Added the UM pseudo level type to the information made available in the STASH_TRANS table in :mod:`iris.fileformats.um._ff_cross_references` -* When reading "cell_methods" attributes from NetCDF files, allow optional whitespace before the colon. - This is not strictly in the CF spec, but is a common occurrence. -* Basic cube arithemetic (plus, minus, times, divide) now supports lazy evaluation. -* :meth:`iris.analysis.cartography.rotate_winds` can now operate much faster on multi-layer (i.e. > 2-dimensional) cubes, - as it calculates rotation coefficients only once and reuses them for additional layers. +* Added the UM pseudo level type to the information made available in the + STASH_TRANS table in :mod:`iris.fileformats.um._ff_cross_references` + +* When reading "cell_methods" attributes from NetCDF files, allow optional + whitespace before the colon. This is not strictly in the CF spec, but is a + common occurrence. -* Linear regridding of a multi-layer (i.e. > 2-dimensional) cube is now much faster, - as it calculates transform coefficients just once and reuses them for additional layers. -* Ensemble statistics can now be saved to GRIB2, using Product Definition Template 4.11. +* Basic cube arithmetic (plus, minus, times, divide) now supports lazy + evaluation. -* Loading of NetCDF data with ocean vertical coordinates now returns a 'depth' in addition to an 'eta' cube. - This operates on specific defined dimensionless coordinates : see CF spec version 1.6, Appendix D. +* :meth:`iris.analysis.cartography.rotate_winds` can now operate much faster + on multi-layer (i.e. > 2-dimensional) cubes, as it calculates rotation + coefficients only once and reuses them for additional layers. + +* Linear regridding of a multi-layer (i.e. > 2-dimensional) cube is now much + faster, as it calculates transform coefficients just once and reuses them for + additional layers. + +* Ensemble statistics can now be saved to GRIB2, using Product Definition + Template 4.11. + +* Loading of NetCDF data with ocean vertical coordinates now returns a 'depth' + in addition to an 'eta' cube. This operates on specific defined + dimensionless coordinates : see CF spec version 1.6, Appendix D. * :func:`iris.analysis.stats.pearsonr` updates: @@ -37,14 +53,21 @@ Iris 1.9 Features * Accepts common_mask keyword for restricting calculation to unmasked pairs of cells. -* Added a new point-in-cell regridding scheme, :class:`iris.experimental.regrid.PointInCell`. -* Added :meth:`iris.analysis.WPERCENTILE` - a new weighted aggregator for calculating - percentiles. -* Added cell-method translations for LBPROC=64 and 192 in UM files, encoding 'zonal mean' and 'zonal+time mean'. +* Added a new point-in-cell regridding scheme, + :class:`iris.experimental.regrid.PointInCell`. + +* Added :meth:`iris.analysis.WPERCENTILE` - a new weighted aggregator for + calculating percentiles. + +* Added cell-method translations for LBPROC=64 and 192 in UM files, encoding + 'zonal mean' and 'zonal+time mean'. + +* Support for loading GRIB2 messages defined on a Lambert conformal grid has + been added to the GRIB2 loader. + +* Data on potential-temperature (theta) levels can now be saved to GRIB2, with + a fixed surface type of 107. -* Support for loading GRIB2 messages defined on a Lambert conformal grid has been added to - the GRIB2 loader. 
-* Data on potential-temperature (theta) levels can now be saved to GRIB2, with a fixed surface type of 107.
 * Added several new helper functions for file-save customisation,
   (see also : :doc:`Saving Iris Cubes `):

   * :meth:`iris.fileformats.pp.as_pairs`
   * :meth:`iris.fileformats.pp.as_fields`
   * :meth:`iris.fileformats.pp.save_fields`
-* Loading data from GRIB2 now supports most of the currently defined 'data representation templates' :
-  code numbers 0, 1, 2, 3, 4, 40, 41, 50, 51 and 61.
-* When a Fieldsfile is opened for update as a :class:`iris.experimental.um.FieldsFileVariant`,
-  unmodified packed data in the file can now be retained in the original form.
-  Previously it could only be stored in an unpacked form.
+
+* Loading data from GRIB2 now supports most of the currently defined 'data
+  representation templates' : code numbers 0, 1, 2, 3, 4, 40, 41, 50, 51 and 61.
+
+* When a Fieldsfile is opened for update as a
+  :class:`iris.experimental.um.FieldsFileVariant`, unmodified packed data in
+  the file can now be retained in the original form. Previously it could only
+  be stored in an unpacked form.
+
 * When reading and writing NetCDF data, the CF 'flag' attributes,
-  "flag_masks", "flag_meanings" and "flag_values" are now preserved through Iris load and save.
-* `mo_pack `_ was added as an optional dependency.
+  "flag_masks", "flag_meanings" and "flag_values" are now preserved through
+  Iris load and save.
+
+* `mo_pack `_ was added as an optional
+  dependency.
   It is used to encode and decode data in WGDOS packed form.
-* The :meth:`iris.experimental.um.Field.get_data` method can now be used to read Fieldsfile data
-  after the original :class:`iris.experimental.um.FieldsFileVariant` has been closed.
 
-Bugs Fixed
+* The :meth:`iris.experimental.um.Field.get_data` method can now be used to
+  read Fieldsfile data after the original
+  :class:`iris.experimental.um.FieldsFileVariant` has been closed.
+
+Bugs fixed
 ==========
+
 * Fixed a bug in :meth:`iris.unit.Unit.convert`
   (and the equivalent in `cf_units `_)
-  so that it now converts data to the native endianness, without which udunits could not read it correctly.
+  so that it now converts data to the native endianness, without which udunits
+  could not read it correctly.
+
 * Fixed a bug with loading WGDOS packed data in :mod:`iris.experimental.um`,
   which could occasionally crash, with some data.
-* Ignore non-numeric suffices in the numpy version string, which would otherwise crash some regridding routines.
+
+* Ignore non-numeric suffixes in the numpy version string, which would
+  otherwise crash some regridding routines.
+
 * fixed a bug in :mod:`iris.fileformats.um_cf_map` where the standard name
-  for the stash code m01s12i187 was incorrectly set, such that it is inconsistent
-  with the stated unit of measure, 'm s-1'. The different name, a long_name
-  of 'change_over_time_in_upward_air_velocity_due_to_advection' with
+  for the stash code m01s12i187 was incorrectly set, such that it is
+  inconsistent with the stated unit of measure, 'm s-1'. The different name,
+  a long_name of 'change_over_time_in_upward_air_velocity_due_to_advection' with
   units of 'm s-1' is now used instead.
+
 * Fixed a bug in :meth:`iris.cube.Cube.intersection`.
-  When edge points were at (base + period), intersection would unnecessarily wrap the data.
+  When edge points were at (base + period), intersection would unnecessarily
+  wrap the data.
+
 * Fixed a bug in :mod:`iris.fileformats.pp`.
-  A previous release removed the ability to pass a partial constraint on STASH attribute.
-* :meth:`iris.plot.default_projection_extent` now correctly raises an exception if a cube has X bounds but no Y bounds, or vice versa.
-  Previously it never failed this, as the test was wrong.
-* When loading NetCDF data, a "units" attribute containing unicode characters is now transformed by backslash-replacement.
-  Previously this caused a crash. Note: unicode units are *not supported in the CF conventions*.
-* When saving to NetCDF, factory-derived auxiliary coordinates are now correctly saved with different names when they are not identical.
-  Previously, such coordinates could be saved with the same name, leading to errors.
+  A previous release removed the ability to pass a partial constraint on STASH
+  attribute.
+
+* :meth:`iris.plot.default_projection_extent` now correctly raises an exception
+  if a cube has X bounds but no Y bounds, or vice versa. Previously it never
+  failed this, as the test was wrong.
+
+* When loading NetCDF data, a "units" attribute containing unicode characters
+  is now transformed by backslash-replacement. Previously this caused a crash.
+  Note: unicode units are *not supported in the CF conventions*.
+
+* When saving to NetCDF, factory-derived auxiliary coordinates are now correctly
+  saved with different names when they are not identical. Previously, such
+  coordinates could be saved with the same name, leading to errors.
+
 * Fixed a bug in :meth:`iris.experimental.um.FieldsFileVariant.close`,
   which now correctly allocates extra blocks for larger lookups when saving.
-  Previously, when larger files open for update were closed, they could be written out with data overlapping the lookup table.
+  Previously, when larger files open for update were closed, they could be
+  written out with data overlapping the lookup table.
+
 * Fixed a bug in :class:`iris.aux_factory.OceanSigmaZFactory`
-  which sometimes caused crashes when fetching the points of an "ocean sigma z" coordinate.
+  which sometimes caused crashes when fetching the points of an "ocean sigma z"
+  coordinate.
+
-Version 1.9.1
--------------
-* Fixed a unicode bug preventing standard names from being built cleanly when installing in Python3
+v1.9.1 (05 Jan 2016)
+--------------------
+
+* Fixed a unicode bug preventing standard names from being built cleanly when
+  installing in Python3.
+
+
+v1.9.2 (28 Jan 2016)
+--------------------
+
+* New warning regarding data loss if writing to an open file which is also
+  open to read, with lazy data.
-Version 1.9.2
--------------
-* New warning regarding data loss if writing to an open file which is also open to read, with lazy data.
 * Removal of a warning about data payload loading from concatenate.
+
 * Updates to concatenate documentation.
+
 * Fixed a bug with a name change in the netcdf4-python package.
+
 * Fixed a bug building the documentation examples.
-* Fixed a bug avoiding sorting classes directly when :meth:`iris.cube.Cube.coord_system` is used in Python3.
+
+* Fixed a bug by avoiding sorting classes directly when
+  :meth:`iris.cube.Cube.coord_system` is used in Python3.
+
 * Fixed a bug regarding unsuccessful dot import.
 
-Incompatible Changes
+
+Incompatible changes
 ====================
-* GRIB message/file reading and writing may not be available for Python 3 due to GRIB API limitations.
+
+* GRIB message/file reading and writing may not be available for Python 3 due
+  to GRIB API limitations.
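A minimal sketch of the :meth:`iris.analysis.WPERCENTILE` aggregator listed in the features above, assuming the Iris sample data is installed; the cosine-latitude weighting is an illustrative choice only::

    import iris
    import iris.analysis
    from iris.analysis.cartography import cosine_latitude_weights

    cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))
    # WPERCENTILE requires both a percentile and weights.
    weights = cosine_latitude_weights(cube)
    median = cube.collapsed(
        "latitude", iris.analysis.WPERCENTILE, percent=50, weights=weights
    )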
+ Deprecations ============ -* Deprecated :mod:`iris.unit`, with unit functionality provided by `cf_units `_ instead. -* When loading from NetCDF, a deprecation warning is emitted if there is vertical coordinate information - that *would* produce extra result cubes if :data:`iris.FUTURE.netcdf_promote` were set, - but it is *not* set. + +* Deprecated :mod:`iris.unit`, with unit functionality provided by + `cf_units `_ instead. + +* When loading from NetCDF, a deprecation warning is emitted if there is + vertical coordinate information that *would* produce extra result cubes if + :data:`iris.FUTURE.netcdf_promote` were set, but it is *not* set. + * Deprecated :class:`iris.aux_factory.LazyArray` -Documentation Changes -===================== + +Documentation +============= + * A chapter on :doc:`saving iris cubes ` has been added to the :doc:`user guide `. -* Added script and documentation for building a what's new page from developer-submitted contributions. - See :doc:`Contributing a "What's New" entry `. + +* Added script and documentation for building a what's new page from + developer-submitted contributions. See + :doc:`Contributing a "What's New" entry `. diff --git a/docs/iris/src/whatsnew/2.0.rst b/docs/iris/src/whatsnew/2.0.rst index 43d60a8539..577e8fea22 100644 --- a/docs/iris/src/whatsnew/2.0.rst +++ b/docs/iris/src/whatsnew/2.0.rst @@ -1,16 +1,18 @@ -What's New in Iris 2.0.0 -************************ +v2.0 (14 Feb 2018) +****************** -:Release: 2.0.0rc1 -:Date: 2018-01-11 +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) -This document explains the new/changed features of Iris in version 2.0.0 -(:doc:`View all changes `). +.. contents:: Skip to section: + :local: + :depth: 3 -Iris 2.0.0 Features -=================== +Features +======== + .. _showcase: .. admonition:: Dask Integration @@ -114,7 +116,7 @@ all existing toggles in :attr:`iris.FUTURE` now default to :data:`True`. off is now deprecated. -Bugs Fixed +Bugs fixed ========== * Indexing or slicing an :class:`~iris.coords.AuxCoord` coordinate will return a coordinate with @@ -209,8 +211,8 @@ Incompatible Changes printed as ``m.s-1``. -Deprecation removals --------------------- +Deprecation +=========== All deprecated functionality that was announced for removal in Iris 2.0 has been removed. In particular: @@ -289,8 +291,8 @@ been removed. In particular: removed from the :class:`iris.fileformats.rules.Loader` constructor. -Documentation Changes -===================== +Documentation +============= * A new UserGuide chapter on :doc:`Real and Lazy Data ` has been added, and referenced from key diff --git a/docs/iris/src/whatsnew/2.1.rst b/docs/iris/src/whatsnew/2.1.rst index 00f7115431..311e8c251b 100644 --- a/docs/iris/src/whatsnew/2.1.rst +++ b/docs/iris/src/whatsnew/2.1.rst @@ -1,37 +1,17 @@ -What's New in Iris 2.1 -********************** +v2.1 (06 Jun 2018) +****************** -:Release: 2.1 -:Date: 2018-06-06 +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) -This document explains the new/changed features of Iris in version 2.1 -(:doc:`older "What's New" release notes can be found here`.) +.. contents:: Skip to section: + :local: + :depth: 3 -Iris 2.1 Dependency updates -=========================== -* The `cf_units `_ dependency - was updated to cf_units ``v2.0``. - cf_units v2 is almost entirely backwards compatible with v1. - However the ability to preserve some aliased calendars has been removed. 
- For this reason, it is possible that NetCDF load of a variable with a - "standard" calendar will result in a saved NetCDF of a "gregorian" - calendar. -* Iris updated its time-handling functionality from the - `netcdf4-python `_ - ``netcdftime`` implementation to the standalone module - `cftime `_. - cftime is entirely compatible with netcdftime, but some issues may - occur where users are constructing their own datetime objects. - In this situation, simply replacing ``netcdftime.datetime`` with - ``cftime.datetime`` should be sufficient. -* Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy. - Full requirements can be seen in the `requirements `_ - directory of the Iris' the source. - -Iris 2.1 Features -================= +Features +======== * Added ``repr_html`` functionality to the :class:`~iris.cube.Cube` to provide a rich html representation of cubes in Jupyter notebooks. Existing functionality @@ -42,42 +22,81 @@ Iris 2.1 Features * Updated :func:`iris.cube.Cube.name` to return a STASH code if the cube has one and no other valid names are present. This is now consistent with the summary information from :func:`iris.cube.Cube.summary`. + * The partial collapse of multi-dimensional auxiliary coordinates is now supported. Collapsed bounds span the range of the collapsed dimension(s). + * Added new function :func:`iris.cube.CubeList.realise_data` to compute multiple lazy values in a single operation, avoiding repeated re-loading of data or re-calculation of expressions. + * The methods :meth:`iris.cube.Cube.convert_units` and :meth:`iris.coords.Coord.convert_units` no longer forcibly realise the cube data or coordinate points/bounds. The converted values are now lazy arrays if the originals were. + * Added :meth:`iris.analysis.trajectory.interpolate` that allows you to interpolate to find values along a trajectory. + * It is now possible to add an attribute of ``missing_value`` to a cube (:issue:`1588`). + * Iris can now represent data on the Albers Equal Area Projection, and the NetCDF loader and saver were updated to handle this. (:issue:`2943`) + * The :class:`~iris.coord_systems.Mercator` projection has been updated to accept the ``standard_parallel`` keyword argument (:pull:`3041`). -Bugs Fixed + +Bugs fixed ========== * All var names being written to NetCDF are now CF compliant. Non alpha-numeric characters are replaced with '_', and var names now always have a leading letter (:pull:`2930`). + * A cube resulting from a regrid operation using the `iris.analysis.AreaWeighted` regridding scheme will now have the smallest floating point data type to which the source cube's data type can be safely converted using NumPy's type promotion rules. + * :mod:`iris.quickplot` labels now honour the axes being drawn to when using the ``axes`` keyword (:pull:`3010`). -Incompatible Changes + +Incompatible changes ==================== + * The deprecated :mod:`iris.experimental.um` was removed. Please use consider using `mule `_ as an alternative. + * This release of Iris contains a number of updated metadata translations. - See [this changelist](https://github.com/SciTools/iris/commit/69597eb3d8501ff16ee3d56aef1f7b8f1c2bb316#diff-1680206bdc5cfaa83e14428f5ba0f848) + See this + `changelist `_ for further information. + + +Internal +======== + +* The `cf_units `_ dependency + was updated to cf_units ``v2.0``. + cf_units v2 is almost entirely backwards compatible with v1. + However the ability to preserve some aliased calendars has been removed. 
+  For this reason, it is possible that NetCDF load of a variable with a
+  "standard" calendar will result in a saved NetCDF of a "gregorian"
+  calendar.
+
+* Iris updated its time-handling functionality from the
+  `netcdf4-python `_
+  ``netcdftime`` implementation to the standalone module
+  `cftime `_.
+  cftime is entirely compatible with netcdftime, but some issues may
+  occur where users are constructing their own datetime objects.
+  In this situation, simply replacing ``netcdftime.datetime`` with
+  ``cftime.datetime`` should be sufficient.
+
+* Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy.
+  Full requirements can be seen in the `requirements `_
+  directory of the Iris source.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/2.2.rst b/docs/iris/src/whatsnew/2.2.rst
index 1eff99ecb4..314f84355f 100644
--- a/docs/iris/src/whatsnew/2.2.rst
+++ b/docs/iris/src/whatsnew/2.2.rst
@@ -1,17 +1,18 @@
-What's New in Iris 2.2
-************************
+v2.2 (11 Oct 2018)
+******************
 
-:Release: 2.2.0
-:Date:
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
 
-This document explains the new/changed features of Iris in the release
-of version 2.2
-(:doc:`View all changes `).
+.. contents:: Skip to section:
+   :local:
+   :depth: 3
 
-Iris 2.2 Features
-===================
+Features
+========
+
 .. _showcase:
 
 .. admonition:: 2-Dimensional Coordinate Plotting
@@ -70,19 +71,7 @@ Iris 2.2 Features
   a NaN-tolerant array comparison.
 
-Iris 2.2 Dependency updates
-=============================
-
-* Iris is now using the latest version release of dask (currently 0.19.3)
-
-* Proj4 has been temporarily pinned to version < 5 while problems with the
-  Mollweide projection are addressed.
-
-* Matplotlib has been pinned to version < 3 temporarily while we account for
-  its changes in all SciTools libraries.
-
-
-Bugs Fixed
+Bugs fixed
 ==========
 
 * The bug has been fixed that prevented printing time coordinates with bounds
@@ -93,7 +82,7 @@ Bugs Fixed
   bound data is actually masked.
 
-Bugs fixed in v2.2.1
+v2.2.1 (28 May 2019)
 --------------------
 
 * Iris can now correctly unpack a column of header objects when saving a
@@ -108,9 +97,20 @@ Bugs fixed in v2.2.1
   floating-point arithmetic.
 
+Internal
+========
+
+* Iris is now using the latest release of dask (currently 0.19.3).
+
+* Proj4 has been temporarily pinned to version < 5 while problems with the
+  Mollweide projection are addressed.
+
+* Matplotlib has been pinned to version < 3 temporarily while we account for
+  its changes in all SciTools libraries.
+
-Documentation Changes
-=====================
+Documentation
+=============
 
 * Iris' `INSTALL` document has been updated to include guidance for running
   tests.
diff --git a/docs/iris/src/whatsnew/2.3.rst b/docs/iris/src/whatsnew/2.3.rst
index 872fb44cd6..914d86fda2 100644
--- a/docs/iris/src/whatsnew/2.3.rst
+++ b/docs/iris/src/whatsnew/2.3.rst
@@ -1,14 +1,18 @@
-What's New in Iris 2.3.0
-************************
+v2.3 (19 Dec 2019)
+******************
 
-:Release: 2.3.0
-:Date: 2019-12-19
-
-This document explains the new/changed features of Iris in version 2.3.0
+This document explains the changes made to Iris for this release
 (:doc:`View all changes `.)
 
-Iris 2.3.0 Features
-===================
+
+.. contents:: Skip to section:
+   :local:
+   :depth: 3
+
+
+Features
+========
+
 .. _showcase:
 
 .. admonition:: Support for CF 1.7
@@ -81,7 +85,7 @@ Iris 2.3.0 Features
   previously could produce a large number of small chunks.
This had an adverse effect on performance.
 
-  In addition, Iris now takes its default chunksize from the default configured
+  In addition, Iris now takes its default chunk size from the default configured
   in Dask itself, i.e. ``dask.config.get('array.chunk-size')``.
 
 .. admonition:: Lazy Statistics
 
@@ -103,119 +107,158 @@ Iris 2.3.0 Features
   relaxed tolerance : This means that some cubes may now test 'equal' that
   previously did not.
 
-  Previously, Iris compared cube data arrays using:
-  ``abs(a - b) < 1.e-8``
+  Previously, Iris compared cube data arrays using
+  ``abs(a - b) < 1.e-8``
 
   We now apply the default operation of :func:`numpy.allclose` instead,
-  which is equivalent to:
-  ``abs(a - b) < (1.e-8 + 1.e-5 * b)``
+  which is equivalent to
+  ``abs(a - b) <= (1.e-8 + 1.e-5 * abs(b))``
 
 * Added support to render HTML for :class:`~iris.cube.CubeList` in Jupyter
   Notebooks and JupyterLab.
+
 * Loading CellMeasures with integer values is now supported.
+
 * New coordinate system: :class:`iris.coord_systems.Geostationary`,
   including load and save support, based on the `CF Geostationary projection
   definition `_.
+
 * :class:`iris.coord_systems.VerticalPerspective` can now be saved to and
   loaded from NetCDF files.
+
 * :class:`iris.experimental.regrid.PointInCell` moved to
   :class:`iris.analysis.PointInCell` to make this regridding scheme public
-* Iris now supports standard name modifiers. See `Appendix C, Standard Name Modifiers `_ for more information.
+
+* Iris now supports standard name modifiers. See
+  `Appendix C, Standard Name Modifiers `_
+  for more information.
+
 * :meth:`iris.cube.Cube.remove_cell_measure` now also allows removal of a cell
   measure by its name (previously only accepted a CellMeasure object).
+
 * The :data:`iris.analysis.RMS` aggregator now supports a lazy calculation.
   However, the "weights" keyword is not currently supported by this, so a
   *weighted* calculation will still return a realised result, *and* force
   realisation of the original cube data.
-* Iris now supports NetCDF Climate and Forecast (CF) Metadata Conventions 1.7 (see `CF 1.7 Conventions Document `_ for more information)
+
+* Iris now supports NetCDF Climate and Forecast (CF) Metadata Conventions 1.7
+  (see `CF 1.7 Conventions Document `_ for more information)
+
 * Updated standard name support to
   `CF standard name table version 70, 2019-12-10 `_
+
 * Updated UM STASH translations to
   `metarelate/metOcean commit 448f2ef, 2019-11-29 `_
 
-Iris 2.3.0 Dependency Updates
-=============================
-* Iris now supports Proj4 up to version 5, but not yet 6 or beyond, pending
-  `fixes to some cartopy tests `_.
-* Iris now requires Dask >= 1.2 to allow for improved coordinate equality
-  checks.
-
-
-Bugs Fixed
+Bugs fixed
 ==========
+
 * Cube equality of boolean data is now handled correctly.
+
 * Fixed a bug where cell measures were incorrect after a cube
   :meth:`~iris.cube.Cube.transpose` operation. Previously, this resulted in
   cell-measures that were no longer correctly mapped to the cube dimensions.
-* The :class:`~iris.coords.AuxCoord` disregarded masked points and bounds, as did the :class:`~iris.coords.DimCoord`.
-  Fix permits an :class:`~iris.coords.AuxCoord` to contain masked points/bounds, and a TypeError exception is now
-  raised when attempting to create or set the points/bounds of a
-  :class:`~iris.coords.DimCoord` with arrays with missing points.
+
+* The :class:`~iris.coords.AuxCoord` disregarded masked points and bounds, as
+  did the :class:`~iris.coords.DimCoord`.
The fix permits an
+  :class:`~iris.coords.AuxCoord` to contain masked points/bounds, and a
+  TypeError exception is now raised when attempting to create or set the
+  points/bounds of a :class:`~iris.coords.DimCoord` with arrays with missing
+  points.
+
 * :class:`iris.coord_systems.VerticalPerspective` coordinate system now uses
   the `CF Vertical perspective definition `_; had been
   erroneously using Geostationary.
-* :class:`~iris.coords.CellMethod` will now only use valid `NetCDF name tokens `_ to reference the coordinates involved in the statistical operation.
-* The following var_name properties will now only allow valid `NetCDF name
-  tokens
-  `_ to
-  reference the said NetCDF variable name. Note that names with a leading
+
+* :class:`~iris.coords.CellMethod` will now only use valid
+  `NetCDF name tokens`_ to reference the coordinates involved in the
+  statistical operation.
+
+* The following var_name properties will now only allow valid
+  `NetCDF name tokens`_
+  to reference the said NetCDF variable name. Note that names with a leading
   underscore are not permitted.
-  - :attr:`iris.aux_factory.AuxCoordFactory.var_name`
-  - :attr:`iris.coords.CellMeasure.var_name`
-  - :attr:`iris.coords.Coord.var_name`
-  - :attr:`iris.coords.AuxCoord.var_name`
-  - :attr:`iris.cube.Cube.var_name`
+
+.. _NetCDF name tokens: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_data_set_components.html#object_name
+
+  * :attr:`iris.aux_factory.AuxCoordFactory.var_name`
+  * :attr:`iris.coords.CellMeasure.var_name`
+  * :attr:`iris.coords.Coord.var_name`
+  * :attr:`iris.coords.AuxCoord.var_name`
+  * :attr:`iris.cube.Cube.var_name`
+
 * Rendering a cube in Jupyter will no longer crash for a cube with
   attributes containing ``\n``.
+
 * NetCDF variables which reference themselves in their ``cell_measures``
   attribute can now be read.
+
 * :func:`~iris.plot.quiver` now handles circular coordinates.
+
 * The names of cubes loaded from abf/abl files have been corrected.
+
 * Fixed a bug in UM file loading, where any landsea-mask-compressed fields
   (i.e. with LBPACK=x2x) would cause an error later, when realising the data.
+
 * :meth:`iris.cube.Cube.collapsed` now handles partial collapsing of
   multidimensional coordinates that have bounds.
+
 * Fixed a bug in the :data:`~iris.analysis.PROPORTION` aggregator, where cube
   data in the form of a masked array with ``array.mask=False`` would cause an
   error, but possibly only later when the values are actually realised.
   ( Note: since netCDF4 version 1.4.0, this is now a common form for data
   loaded from netCDF files ).
+
 * Fixed a bug where plotting a cube with a
   :class:`iris.coord_systems.LambertConformal` coordinate system would result
   in an error. This would happen if the coordinate system was defined with one
   standard parallel, rather than two. In these cases, a call to
   :meth:`~iris.coord_systems.LambertConformal.as_cartopy_crs` would fail.
+
 * :meth:`iris.cube.Cube.aggregated_by` now gives correct values in points and
   bounds when handling multidimensional coordinates.
+
 * Fixed a bug in the :meth:`iris.cube.Cube.collapsed` operation, which caused
   the unexpected realization of any attached auxiliary coordinates that were
   *bounded*. It now correctly produces a lazy result and does not realise the
   original attached AuxCoords.
 
-Documentation Changes
-=====================
+Internal
+========
+
+* Iris now supports Proj4 up to version 5, but not yet 6 or beyond, pending
+  `fixes to some cartopy tests `_.
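The Dask-derived default chunk size noted in the v2.3 features above can be inspected and adjusted through Dask's own configuration; a minimal sketch, where the ``256MiB`` value and sample file are illustrative::

    import dask.config
    import iris

    # Iris now takes its default chunk size from Dask's own configuration.
    print(dask.config.get("array.chunk-size"))  # e.g. "128MiB"

    # Raising it before loading can reduce the number of chunks created.
    dask.config.set({"array.chunk-size": "256MiB"})
    cube = iris.load_cube(iris.sample_data_path("air_temp.pp"))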
+
+* Iris now requires Dask >= 1.2 to allow for improved coordinate equality
+  checks.
+
+
+Documentation
+=============
+
 * Adopted a
-  `new colour logo for Iris <../_static/Iris7_1_trim_full.png>`_
-* Added a gallery example showing `how to concatenate NEMO ocean model data
-  <../examples/Oceanography/load_nemo.html>`_.
-* Added an example in the
-  `Loading Iris Cubes: Constraining on Time <../userguide/loading_iris_cubes
-  .html#constraining-on-time>`_
-  Userguide section, demonstrating how to load data within a specified date
+  `new colour logo for Iris `_
+
+* Added a gallery example showing how to concatenate NEMO ocean model data,
+  see :ref:`sphx_glr_generated_gallery_oceanography_plot_load_nemo.py`.
+
+* Added an example of loading Iris cubes for :ref:`using-time-constraints`
+  in the user guide, demonstrating how to load data within a specified date
   range.
+
 * Added notes to the :func:`iris.load` documentation, and the userguide
-  `Loading Iris Cubes <../userguide/loading_iris_cubes.html>`_
-  chapter, emphasizing that the *order* of the cubes returned by an iris load
-  operation is effectively random and unstable, and should not be relied on.
+  :ref:`loading_iris_cubes` chapter, emphasizing that the *order* of the cubes
+  returned by an iris load operation is effectively random and unstable, and
+  should not be relied on.
+
 * Fixed references in the documentation of
-  :func:`iris.util.find_discontiguities` to a nonexistent
+  :func:`iris.util.find_discontiguities` to a non-existent
   "mask_discontiguities" routine : these now refer to
   :func:`~iris.util.mask_cube`.
diff --git a/docs/iris/src/whatsnew/2.4.rst b/docs/iris/src/whatsnew/2.4.rst
index 2facb97a7a..ca7be20cd8 100644
--- a/docs/iris/src/whatsnew/2.4.rst
+++ b/docs/iris/src/whatsnew/2.4.rst
@@ -1,23 +1,25 @@
-What's New in Iris 2.4.0
-************************
+v2.4 (20 Feb 2020)
+******************
 
-:Release: 2.4.0
-:Date: 2020-02-20
-
-This document explains the new/changed features of Iris in version 2.4.0
+This document explains the changes made to Iris for this release
 (:doc:`View all changes `.)
 
-Iris 2.4.0 Features
-===================
+.. contents:: Skip to section:
+   :local:
+   :depth: 3
+
+
+Features
+========
 
 .. admonition:: Last python 2 version of Iris
 
-   Iris 2.4 is a final extra release of Iris 2, which back-ports specific desired features from
-   Iris 3 (not yet released).
+   Iris 2.4 is a final extra release of Iris 2, which back-ports specific
+   desired features from Iris 3 (not yet released).
 
-   The purpose of this is both to support early adoption of certain newer features,
-   and to provide a final release for Python 2.
+   The purpose of this is both to support early adoption of certain newer
+   features, and to provide a final release for Python 2.
 
    The next release of Iris will be version 3.0 : a major-version release which
    introduces breaking API and behavioural changes, and only supports Python 3.
@@ -25,35 +27,42 @@ Iris 2.4.0 Features
 * :class:`iris.coord_systems.Geostationary` can now accept creation arguments of
   `false_easting=None` or `false_northing=None`, equivalent to values of 0.
   Previously these kwargs could be omitted, but could not be set to `None`.
-  This also enables loading of netcdf data on a Geostationary grid, where either of these
-  keys is not present as a grid-mapping variable property : Previously, loading any
-  such data caused an exception.
-* The area weights used when performing area weighted regridding with :class:`iris.analysis.AreaWeighted`
-  are now cached.
-  This allows a significant speedup when regridding multiple similar cubes, by repeatedly using
-  a `'regridder' object <../iris/iris/analysis.html?highlight=regridder#iris.analysis.AreaWeighted.regridder>`_
+  This also enables loading of netcdf data on a Geostationary grid, where
+  either of these keys is not present as a grid-mapping variable
+  property : Previously, loading any such data caused an exception.
+
+* The area weights used when performing area weighted regridding with
+  :class:`iris.analysis.AreaWeighted` are now cached. This allows a
+  significant speed up when regridding multiple similar cubes, by repeatedly
+  using a :func:`iris.analysis.AreaWeighted.regridder` object
+  which you created first.
-* Name constraint matching against cubes during loading or extracting has been relaxed from strictly matching
-  against the :meth:`~iris.cube.Cube.name`, to matching against either the
-  ``standard_name``, ``long_name``, NetCDF ``var_name``, or ``STASH`` attributes metadata of a cube.
-* Cubes and coordinates now have a new ``names`` property that contains a tuple of the
-  ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH`` attributes metadata.
-* The :class:`~iris.NameConstraint` provides richer name constraint matching when loading or extracting
-  against cubes, by supporting a constraint against any combination of
-  ``standard_name``, ``long_name``, NetCDF ``var_name`` and ``STASH``
-  from the attributes dictionary of a :class:`~iris.cube.Cube`.
-
-
-Iris 2.4.0 Dependency Updates
-=============================
-* Iris is now able to use the latest version of matplotlib.
+* Name constraint matching against cubes during loading or extracting has been
+  relaxed from strictly matching against the :meth:`~iris.cube.Cube.name`, to
+  matching against either the ``standard_name``, ``long_name``, NetCDF
+  ``var_name``, or ``STASH`` attributes metadata of a cube.
+
+* Cubes and coordinates now have a new ``names`` property that contains a tuple
+  of the ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH``
+  attributes metadata.
 
-Bugs Fixed
+* The :class:`~iris.NameConstraint` provides richer name constraint matching
+  when loading or extracting against cubes, by supporting a constraint against
+  any combination of ``standard_name``, ``long_name``, NetCDF ``var_name`` and
+  ``STASH`` from the attributes dictionary of a :class:`~iris.cube.Cube`.
+
+
+Bugs fixed
 ==========
+
 * Fixed a problem which was causing file loads to fetch *all* field data
   whenever UM files (PP or Fieldsfiles) were loaded.
-  With large sourcefiles, initial file loads are slow, with large memory usage
+  With large source files, initial file loads are slow, with large memory usage
   before any cube data is even fetched. Large enough files will cause a crash.
   The problem occurs only with Dask versions >= 2.0.
+
+
+Internal
+========
+
+* Iris is now able to use the latest version of matplotlib.
diff --git a/docs/iris/src/whatsnew/aggregate_directory.py b/docs/iris/src/whatsnew/aggregate_directory.py
deleted file mode 100644
index c7b497307f..0000000000
--- a/docs/iris/src/whatsnew/aggregate_directory.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the LGPL license.
-# See COPYING and COPYING.LESSER in the root of the repository for full
-# licensing details.
-"""
-Build a release file from files in a contributions directory.
-
-Looks for directories "<...whatsnew>/contributions_".
-Takes specified "xx.xx" as version, or latest found (alphabetic). -Writes a file "<...whatsnew>/.rst". - -Valid contributions filenames are of the form: - __summary.txt -Where can be any valid chars, and - is one of : - "newfeature" "bugfix" "incompatiblechange" "deprecate" "docchange", and - is in the style "2001-Jan-23". - -""" - -import datetime -from glob import glob -import os -import re -import argparse -import warnings -from operator import itemgetter -from distutils import version - -# Regular expressions: CONTRIBUTION_REGEX matches the filenames of -# contribution snippets. It is split into three sections separated by _ -# 0. String for the category. 1. ISO8601 date. 2. String for the feature name. -# RELEASE_REGEX matches the directory names, returning the release. -CONTRIBUTION_REGEX_STRING = r"(?P.*)" -CONTRIBUTION_REGEX_STRING += r"_(?P\d{4}-\w{3}-\d{2})" -CONTRIBUTION_REGEX_STRING += r"_(?P.*)\.txt$" -CONTRIBUTION_REGEX = re.compile(CONTRIBUTION_REGEX_STRING) -RELEASEDIR_PREFIX = r"contributions_" -_RELEASEDIR_REGEX_STRING = RELEASEDIR_PREFIX + r"(?P.*)$" -RELEASE_REGEX = re.compile(_RELEASEDIR_REGEX_STRING) -SOFTWARE_NAME = "Iris" -EXTENSION = ".rst" -VALID_CATEGORIES = [ - {"Prefix": "newfeature", "Title": "Features"}, - {"Prefix": "bugfix", "Title": "Bugs Fixed"}, - {"Prefix": "incompatiblechange", "Title": "Incompatible Changes"}, - {"Prefix": "deprecate", "Title": "Deprecations"}, - {"Prefix": "docchange", "Title": "Documentation Changes"}, -] -VALID_CATEGORY_PREFIXES = [cat["Prefix"] for cat in VALID_CATEGORIES] - - -def _self_root_directory(): - return os.path.abspath(os.path.dirname(__file__)) - - -def _decode_contribution_filename(file_name): - file_name_elements = CONTRIBUTION_REGEX.match(file_name) - category = file_name_elements.group("category") - if category not in VALID_CATEGORY_PREFIXES: - # This is an error - raise ValueError("Unknown category in contribution filename.") - isodate = file_name_elements.group("isodate") - date_of_item = datetime.datetime.strptime(isodate, "%Y-%b-%d").date() - return category, isodate, date_of_item - - -def is_release_directory(directory_name, release): - """Returns True if a given directory name matches the requested release.""" - result = False - directory_elements = RELEASE_REGEX.match(directory_name) - try: - release_string = directory_elements.group("release") - directory_release = version.StrictVersion(release_string) - except (AttributeError, ValueError): - pass - else: - if directory_release == release: - result = True - return result - - -def is_compiled_release(root_directory, release): - """Returns True if the requested release.rst file exists.""" - result = False - compiled_filename = "{!s}{}".format(release, EXTENSION) - compiled_filepath = os.path.join(root_directory, compiled_filename) - if os.path.exists(compiled_filepath) and os.path.isfile(compiled_filepath): - result = True - return result - - -def get_latest_release(root_directory=None): - """ - Implement default=latest release identification. - - Returns a valid release code. - - """ - if root_directory is None: - root_directory = _self_root_directory() - directory_contents = os.listdir(root_directory) - # Default release to latest visible dir. - possible_release_dirs = [ - releasedir_name - for releasedir_name in directory_contents - if RELEASE_REGEX.match(releasedir_name) - ] - if len(possible_release_dirs) == 0: - dirspec = os.path.join(root_directory, RELEASEDIR_PREFIX + "*") - msg = "No valid release directories found, i.e. {!r}." 
- raise ValueError(msg.format(dirspec)) - release_dirname = sorted(possible_release_dirs)[-1] - release = RELEASE_REGEX.match(release_dirname).group("release") - return release - - -def find_release_directory( - root_directory, release=None, fail_on_existing=True -): - """ - Returns the matching contribution directory or raises an exception. - - Defaults to latest-found release (from release directory names). - Optionally, fail if the matching release file already exists. - *Always* fail if no release directory exists. - - """ - if release is None: - # Default to latest release. - release = get_latest_release(root_directory) - - if fail_on_existing: - compiled_release = is_compiled_release(root_directory, release) - if compiled_release: - msg = ( - "Specified release {!r} is already compiled : " - "{!r} already exists." - ) - compiled_filename = str(release) + EXTENSION - raise ValueError(msg.format(release, compiled_filename)) - - directory_contents = os.listdir(root_directory) - result = None - for inode in directory_contents: - node_path = os.path.join(root_directory, inode) - if os.path.isdir(node_path): - release_directory = is_release_directory(inode, release) - if release_directory: - result = os.path.join(root_directory, inode) - break - if not result: - msg = "Contribution folder for release {!s} does not exist : no {!r}." - release_dirname = RELEASEDIR_PREFIX + str(release) + "/" - release_dirpath = os.path.join(root_directory, release_dirname) - raise ValueError(msg.format(release, release_dirpath)) - return result - - -def generate_header(release, unreleased=False): - """Return a list of text lines that make up a header for the document.""" - if unreleased: - isodatestamp = "" - else: - isodatestamp = datetime.date.today().strftime("%Y-%m-%d") - header_text = [] - title_template = "What's New in {} {!s}\n" - title_line = title_template.format(SOFTWARE_NAME, release) - title_underline = ("*" * (len(title_line) - 1)) + "\n" - header_text.append(title_line) - header_text.append(title_underline) - header_text.append("\n") - header_text.append(":Release: {!s}\n".format(release)) - header_text.append(":Date: {}\n".format(isodatestamp)) - header_text.append("\n") - description_template = ( - "This document explains the new/changed features " - "of {} in version {!s}\n" - ) - header_text.append(description_template.format(SOFTWARE_NAME, release)) - header_text.append("(:doc:`View all changes `.)") - header_text.append("\n") - return header_text - - -def read_directory(directory_path): - """Parse the items in a specified directory and return their metadata.""" - directory_contents = os.listdir(directory_path) - compilable_files_unsorted = [] - misnamed_files = [] - for file_name in directory_contents: - try: - category, isodate, date_of_item = _decode_contribution_filename( - file_name - ) - except (AttributeError, ValueError): - misnamed_files.append(file_name) - continue - compilable_files_unsorted.append( - {"Category": category, "Date": date_of_item, "FileName": file_name} - ) - compilable_files = sorted( - compilable_files_unsorted, key=itemgetter("Date"), reverse=True - ) - if misnamed_files: - msg = "Found contribution file(s) with unexpected names :" - for filename in misnamed_files: - full_path = os.path.join(directory_path, filename) - msg += "\n {}".format(full_path) - warnings.warn(msg, UserWarning) - - return compilable_files - - -def compile_directory(directory, release, unreleased=False): - """Read in source files in date order and compile the text into a list.""" - if 
unreleased: - release = "" - source_text = read_directory(directory) - compiled_text = [] - header_text = generate_header(release, unreleased) - compiled_text.extend(header_text) - for count, category in enumerate(VALID_CATEGORIES): - category_text = [] - subtitle_line = "" - if count == 0: - subtitle_line += "{} {!s} ".format(SOFTWARE_NAME, release) - subtitle_line += category["Title"] + "\n" - subtitle_underline = ("=" * (len(subtitle_line) - 1)) + "\n" - category_text.append("\n") - category_text.append(subtitle_line) - category_text.append(subtitle_underline) - category_items = [ - item - for item in source_text - if item["Category"] == category["Prefix"] - ] - if not category_items: - continue - for file_description in category_items: - entry_path = os.path.join(directory, file_description["FileName"]) - with open(entry_path, "r") as content_object: - text = content_object.readlines() - if not text[-1].endswith("\n"): - text[-1] += "\n" - category_text.extend(text) - category_text.append("\n----\n\n") - compiled_text.extend(category_text) - return compiled_text - - -def check_all_contributions_valid(release=None, quiet=False, unreleased=False): - """"Scan the contributions directory for badly-named files.""" - root_directory = _self_root_directory() - # Check there are *some* contributions directory(s), else silently pass. - contribs_spec = os.path.join(root_directory, RELEASEDIR_PREFIX + "*") - if len(glob(contribs_spec)) > 0: - # There are some contributions directories: check latest / specified. - if release is None: - release = get_latest_release() - if not quiet: - msg = 'Checking whatsnew contributions for release "{!s}".' - print(msg.format(release)) - release_directory = find_release_directory( - root_directory, release, fail_on_existing=False - ) - # Run the directory scan, but convert any warning into an error. - with warnings.catch_warnings(): - warnings.simplefilter("error") - compile_directory(release_directory, release, unreleased) - if not quiet: - print("done.") - - -def run_compilation(release=None, quiet=False, unreleased=False): - """Write a draft release.rst file given a specified uncompiled release.""" - if release is None: - # This must exist ! - release = get_latest_release() - if not quiet: - msg = 'Building release document for release "{!s}".' - print(msg.format(release)) - root_directory = _self_root_directory() - release_directory = find_release_directory(root_directory, release) - compiled_text = compile_directory(release_directory, release, unreleased) - if unreleased: - compiled_filename = "latest" + EXTENSION - else: - compiled_filename = str(release) + EXTENSION - compiled_filepath = os.path.join(root_directory, compiled_filename) - with open(compiled_filepath, "w") as output_object: - for string_line in compiled_text: - output_object.write(string_line) - if not quiet: - print("done.") - - -if __name__ == "__main__": - PARSER = argparse.ArgumentParser() - PARSER.add_argument( - "release", - help="Release number to be compiled", - nargs="?", - type=version.StrictVersion, - ) - PARSER.add_argument( - "-c", - "--checkonly", - action="store_true", - help="Check contribution file names, do not build.", - ) - PARSER.add_argument( - "-u", - "--unreleased", - action="store_true", - help=( - "Label the release version as '', " - "and its date as ''." 
- ), - ) - PARSER.add_argument( - "-q", - "--quiet", - action="store_true", - help="Do not print progress messages.", - ) - ARGUMENTS = PARSER.parse_args() - release = ARGUMENTS.release - unreleased = ARGUMENTS.unreleased - quiet = ARGUMENTS.quiet - if ARGUMENTS.checkonly: - check_all_contributions_valid( - release, quiet=quiet, unreleased=unreleased - ) - else: - run_compilation(release, quiet=quiet, unreleased=unreleased) diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Dec-02_cell_measure_concatenate.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Dec-02_cell_measure_concatenate.txt deleted file mode 100644 index 151341d9af..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Dec-02_cell_measure_concatenate.txt +++ /dev/null @@ -1,2 +0,0 @@ -* Concatenating cubes along an axis shared by cell measures would cause concatenation to inappropriately fail. - These cell measures are now concatenated together in the resulting cube. \ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-14_cell_measure_positional_argument.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-14_cell_measure_positional_argument.txt deleted file mode 100644 index d43b5c2d44..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-14_cell_measure_positional_argument.txt +++ /dev/null @@ -1,4 +0,0 @@ -* A :class:`iris.coords.CellMeasure` requires a string ``measure`` attribute to be defined, which can only have a value - of ``area`` or ``volume``. Previously, the ``measure`` was provided as a keyword argument to - :class:`~iris.coords.CellMeasure` with an default value of ``None``, which caused a ``TypeError`` when no - ``measure`` was provided. The default value of ``area`` is now used. \ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-19_cell_measure_copy_loss.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-19_cell_measure_copy_loss.txt deleted file mode 100644 index 3a0bbfaf56..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-19_cell_measure_copy_loss.txt +++ /dev/null @@ -1,2 +0,0 @@ -* Copying a cube would previously ignore any attached class:`iris.coords.CellMeasure`. - These are now copied over. \ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2020-Feb-13_cube_iter_remove.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2020-Feb-13_cube_iter_remove.txt deleted file mode 100644 index 082cd8acc8..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2020-Feb-13_cube_iter_remove.txt +++ /dev/null @@ -1,3 +0,0 @@ -* The `__iter__()` method in class:`iris.cube.Cube` was set to `None`. - `TypeError` is still raised if a `Cube` is iterated over but - `isinstance(cube, collections.Iterable)` now behaves as expected. \ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-11_remove_LBProc_flag_attributes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-11_remove_LBProc_flag_attributes.txt deleted file mode 100644 index 56c1435316..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-11_remove_LBProc_flag_attributes.txt +++ /dev/null @@ -1,2 +0,0 @@ -* :attr:`iris.fileformats.pp.PPField.lbproc` is now an `int`. The - deprecated attributes `flag1`, `flag2` etc. have been removed from it. 
\ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-14_remove_deprecated_future_flags.txt b/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-14_remove_deprecated_future_flags.txt deleted file mode 100644 index 3bf515187b..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-14_remove_deprecated_future_flags.txt +++ /dev/null @@ -1,3 +0,0 @@ -* The deprecated :class:`iris.Future` flags `cell_date_time_objects`, - `netcdf_promote`, `netcdf_no_unlimited` and `clip_latitudes` have - been removed. \ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/docchange_2019-Dec-04_black_code_formatting.txt b/docs/iris/src/whatsnew/contributions_3.0.0/docchange_2019-Dec-04_black_code_formatting.txt deleted file mode 100644 index 500a215bb9..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/docchange_2019-Dec-04_black_code_formatting.txt +++ /dev/null @@ -1,6 +0,0 @@ -* Added support for the `black `_ code formatter. - This is now automatically checked on GitHub PRs, replacing the older, unittest-based - "iris.tests.test_coding_standards.TestCodeFormat". - Black provides automatic code format correction for most IDEs. - See the new developer guide section on this : - https://scitools-docs.github.io/iris/master/developers_guide/code_format.html. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-12_remove_experimental_concatenate_module.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-12_remove_experimental_concatenate_module.txt deleted file mode 100644 index 418377aabc..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-12_remove_experimental_concatenate_module.txt +++ /dev/null @@ -1,3 +0,0 @@ -* The :mod:`iris.experimental.concatenate` module has now been removed. In ``v1.6.0`` the experimental `concatenate` - functionality was moved to the :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the - :func:`iris.experimental.concatenate.concatenate` function raised an exception. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-13_move_experimental_equalise_cubes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-13_move_experimental_equalise_cubes.txt deleted file mode 100644 index a7ddaa441b..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-13_move_experimental_equalise_cubes.txt +++ /dev/null @@ -1,3 +0,0 @@ -* The :func:`iris.experimental.equalise_cubes.equalise_attributes` function has been moved from the - :mod:`iris.experimental` module into the :mod:`iris.util` module. Please use the :func:`iris.util.equalise_attributes` - function instead. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-26_remove_coord_comparison.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-26_remove_coord_comparison.txt deleted file mode 100644 index a8ba4131d0..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-26_remove_coord_comparison.txt +++ /dev/null @@ -1 +0,0 @@ -* The former function "iris.analysis.coord_comparison" has been removed. 
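A minimal sketch of the relocated :func:`iris.util.equalise_attributes` described in the removed snippet above; the input filename pattern is hypothetical, and whether the cubes then merge depends on the data::

    import iris
    from iris.util import equalise_attributes

    cubes = iris.load("my_ensemble_*.nc")  # hypothetical input files
    # Strip attributes that differ between cubes, so they no longer block a merge.
    equalise_attributes(cubes)
    cube = cubes.merge_cube()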
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-15_change_default_unit_loading.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-15_change_default_unit_loading.txt new file mode 100644 index 0000000000..be048990f3 --- /dev/null +++ b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-15_change_default_unit_loading.txt @@ -0,0 +1 @@ +* When loading data from netcdf-CF files, where a variable has no "units" property, the corresponding Iris object will have "units='unknown'". Prior to Iris 3.0, these cases defaulted to "units='1'". \ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-22_cubelist_extract_cubes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-22_cubelist_extract_cubes.txt deleted file mode 100644 index ed8e6a8e2c..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-22_cubelist_extract_cubes.txt +++ /dev/null @@ -1,10 +0,0 @@ -* The method :meth:`~iris.cube.CubeList.extract_strict`, and the 'strict' - keyword to :meth:`~iris.cube.CubeList.extract` method have been removed, and - are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` and - :meth:`~iris.cube.CubeList.extract_cubes`. - The new routines perform the same operation, but in a style more like other - Iris functions such as :meth:`iris.load_cube` and :meth:`iris.load_cubes`. - Unlike 'strict extraction', the type of return value is now completely - consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a cube, - and :meth:`~iris.cube.CubeList.extract_cubes` always returns a CubeList of a - length equal to the number of constraints. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Dec-20_cache_area_weights.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Dec-20_cache_area_weights.txt deleted file mode 100644 index 8c9b7b95d2..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Dec-20_cache_area_weights.txt +++ /dev/null @@ -1,5 +0,0 @@ -* The area weights used when performing area weighted regridding with :class:`iris.analysis.AreaWeighted` - are now cached. - This allows a significant speedup when regridding multiple similar cubes, by repeatedly using - a `'regridder' object <../iris/iris/analysis.html?highlight=regridder#iris.analysis.AreaWeighted.regridder>`_ - which you created first. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-27_cell_measure_statistics.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-27_cell_measure_statistics.txt deleted file mode 100644 index cf8c83e594..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-27_cell_measure_statistics.txt +++ /dev/null @@ -1,5 +0,0 @@ -* Statistical operations :meth:`iris.cube.Cube.collapsed`, - :meth:`iris.cube.Cube.aggregated_by` and :meth:`iris.cube.Cube.rolling_window` - previously removed every :class:`iris.coord.CellMeasure` attached to the cube. - Now, a :class:`iris.coord.CellMeasure` will only be removed if it is associated - with an axis over which the statistic is being run. 
\ No newline at end of file diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-14_cf_ancillary_data.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-14_cf_ancillary_data.txt deleted file mode 100644 index ea70702f38..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-14_cf_ancillary_data.txt +++ /dev/null @@ -1 +0,0 @@ -* CF Ancillary Variables are now supported in cubes. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_nameconstraint.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_nameconstraint.txt deleted file mode 100644 index eeb40990e2..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_nameconstraint.txt +++ /dev/null @@ -1 +0,0 @@ -* The :class:`~iris.NameConstraint` provides richer name constraint matching when loading or extracting against cubes, by supporting a constraint against any combination of ``standard_name``, ``long_name``, NetCDF ``var_name`` and ``STASH`` from the attributes dictionary of a :class:`~iris.cube.Cube`. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_names_property.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_names_property.txt deleted file mode 100644 index a092631152..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_names_property.txt +++ /dev/null @@ -1 +0,0 @@ -* Cubes and coordinates now have a new ``names`` property that contains a tuple of the ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH`` attributes metadata. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_relaxed_name_loading.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_relaxed_name_loading.txt deleted file mode 100644 index 6773ac28b1..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_relaxed_name_loading.txt +++ /dev/null @@ -1 +0,0 @@ -* Name constraint matching against cubes during loading or extracting has been relaxed from strictly matching against the :meth:`~iris.cube.Cube.name`, to matching against either the ``standard_name``, ``long_name``, NetCDF ``var_name``, or ``STASH`` attributes metadata of a cube. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-17_unpin_mpl.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-17_unpin_mpl.txt deleted file mode 100644 index bbee87037a..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-17_unpin_mpl.txt +++ /dev/null @@ -1,2 +0,0 @@ -* Supporting Iris for both Python2 and Python3 resulted in pinning our dependency on matplotlib at v2.x. - Now that Python2 support has been dropped, Iris is free to use the latest version of matplotlib. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-06_relax_geostationary.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-06_relax_geostationary.txt deleted file mode 100644 index e1113c838c..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-06_relax_geostationary.txt +++ /dev/null @@ -1,6 +0,0 @@ -* :class:`iris.coord_systems.Geostationary` can now accept creation arguments of - `false_easting=None` or `false_northing=None`, equivalent to values of 0. - Previously these kwargs could be omitted, but could not be set to `None`. 
- This also enables loading netcdf data on a Geostationary grid, where either of these - keys is not present as a grid-mapping variable property : Previously, loading any - such data caused an exception. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-31_nimrod_format_enhancement.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-31_nimrod_format_enhancement.txt deleted file mode 100644 index 454fc3617f..0000000000 --- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-31_nimrod_format_enhancement.txt +++ /dev/null @@ -1,3 +0,0 @@ -* The :class:`~iris.fileformats.nimrod` provides richer meta-data translation -when loading Nimrod-format data into cubes. This covers most known operational -use-cases. diff --git a/docs/iris/src/whatsnew/index.rst b/docs/iris/src/whatsnew/index.rst index 03834a43a7..a574e7a689 100644 --- a/docs/iris/src/whatsnew/index.rst +++ b/docs/iris/src/whatsnew/index.rst @@ -6,11 +6,11 @@ What's new in Iris These "What's new" pages describe the important changes between major Iris versions. + .. toctree:: - :maxdepth: 2 + :maxdepth: 1 latest.rst - 3.0.rst 2.4.rst 2.3.rst 2.2.rst diff --git a/docs/iris/src/whatsnew/latest.rst b/docs/iris/src/whatsnew/latest.rst new file mode 100644 index 0000000000..a32aca6d5f --- /dev/null +++ b/docs/iris/src/whatsnew/latest.rst @@ -0,0 +1,126 @@ + +************ + +This document explains the changes made to Iris for this release +(:doc:`View all changes <index>`.) + + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== + +* The :mod:`~iris.fileformats.nimrod` module provides richer meta-data translation + when loading ``Nimrod`` data into cubes. This covers most known + operational use-cases. + +* Statistical operations :meth:`iris.cube.Cube.collapsed`, + :meth:`iris.cube.Cube.aggregated_by` and :meth:`iris.cube.Cube.rolling_window` + previously removed every :class:`iris.coords.CellMeasure` attached to the + cube. Now, a :class:`iris.coords.CellMeasure` will only be removed if it is + associated with an axis over which the statistic is being run. + +* Supporting ``Iris`` for both ``Python2`` and ``Python3`` resulted in pinning our + dependency on `matplotlib`_ at ``v2.x``. Now that ``Python2`` support has + been dropped, ``Iris`` is free to use the latest version of `matplotlib`_. + +* `CF Ancillary Data`_ variables are now supported. + + +Bugs Fixed +========== + +* The method :meth:`~iris.cube.Cube.remove_coord` would fail to remove derived + coordinates; it now removes them by removing the corresponding aux_factories. + +* The ``__iter__()`` method in :class:`~iris.cube.Cube` was set to ``None``. + ``TypeError`` is still raised if a :class:`~iris.cube.Cube` is iterated over + but ``isinstance(cube, collections.Iterable)`` now behaves as expected. + +* Concatenating cubes along an axis shared by cell measures would cause + concatenation to inappropriately fail. These cell measures are now + concatenated together in the resulting cube. + +* Copying a cube would previously ignore any attached + :class:`~iris.coords.CellMeasure`. These are now copied over. + +* A :class:`~iris.coords.CellMeasure` requires a string ``measure`` attribute + to be defined, which can only have a value of ``area`` or ``volume``. + Previously, the ``measure`` was provided as a keyword argument to + :class:`~iris.coords.CellMeasure` with a default value of ``None``, which + caused a ``TypeError`` when no ``measure`` was provided. The default value + of ``area`` is now used.
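To illustrate the new ``measure`` default described in the final bullet above, a minimal sketch (the data values and names are placeholders)::

    import numpy as np

    from iris.coords import CellMeasure

    cm = CellMeasure(np.ones(3), var_name="cell_area", units="m2")
    print(cm.measure)  # 'area' - previously, omitting 'measure' raised a TypeError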
+ + +Incompatible Changes +==================== + +* The method :meth:`~iris.cube.CubeList.extract_strict`, and the ``strict`` + keyword to the :meth:`~iris.cube.CubeList.extract` method have been removed, and + are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` and + :meth:`~iris.cube.CubeList.extract_cubes`. + The new routines perform the same operation, but in a style more like other + ``Iris`` functions such as :func:`~iris.load_cube` and :func:`~iris.load_cubes`. + Unlike ``strict`` extraction, the type of return value is now completely + consistent: :meth:`~iris.cube.CubeList.extract_cube` always returns a + :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes` + always returns an :class:`iris.cube.CubeList` of a length equal to the + number of constraints. + +* The former function ``iris.analysis.coord_comparison`` has been removed. + +* The :func:`iris.experimental.equalise_cubes.equalise_attributes` function + has been moved from the :mod:`iris.experimental` module into the + :mod:`iris.util` module. Please use the :func:`iris.util.equalise_attributes` + function instead. + +* The :mod:`iris.experimental.concatenate` module has now been removed. In + ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the + :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the + :func:`iris.experimental.concatenate.concatenate` function raised an + exception. + + +Deprecations +============ + +* The deprecated :class:`iris.Future` flags ``cell_date_time_objects``, + ``netcdf_promote``, ``netcdf_no_unlimited`` and ``clip_latitudes`` have + been removed. + +* :attr:`iris.fileformats.pp.PPField.lbproc` is now an ``int``. The + deprecated attributes ``flag1``, ``flag2`` etc. have been removed from it. + + +Documentation +============= + +* Moved the :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py` + from the general part of the gallery to oceanography. + +* Updated documentation to use a modern sphinx theme and be served from + https://scitools-iris.readthedocs.io/en/latest/. + +* Added support for the `black <https://black.readthedocs.io/en/stable/>`_ code + formatter. This is now automatically checked on GitHub PRs, replacing the + older, unittest-based "iris.tests.test_coding_standards.TestCodeFormat". + Black provides automatic code format correction for most IDEs. See the new + developer guide section on :ref:`iris_code_format`. + +* Refreshed the :ref:`whats_new_contributions` for the :ref:`iris_whatsnew`. + This includes always creating the ``latest`` what's new page so it appears + on the latest documentation at + https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves + :issue:`2104` and :issue:`3451`. Also updated the + :ref:`iris_development_releases_steps` to follow when making a release. + +* Enabled the PDF creation of the documentation on the `Read the Docs`_ service. + The PDF may be accessed by clicking on the version at the bottom of the side + bar, then selecting ``PDF`` from the ``Downloads`` section. + +.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ +.. _matplotlib: https://matplotlib.org/ +..
_CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data \ No newline at end of file diff --git a/docs/iris/src/whatsnew/latest.rst.template b/docs/iris/src/whatsnew/latest.rst.template new file mode 100644 index 0000000000..82f87d9e5a --- /dev/null +++ b/docs/iris/src/whatsnew/latest.rst.template @@ -0,0 +1,46 @@ + +************ + +This document explains the changes made to Iris for this release +(:doc:`View all changes <index>`.) + + +.. contents:: Skip to section: + :local: + :depth: 3 + + +Features +======== + +* N/A + + +Bugs Fixed +========== + +* N/A + + +Incompatible Changes +==================== + +* N/A + + +Dependencies +============ + +* N/A + + +Internal +======== + +* N/A + + +Documentation +============= + +* N/A \ No newline at end of file diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 646613d114..6bda3aa274 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -9,7 +9,6 @@ """ from collections import defaultdict, namedtuple -from copy import deepcopy import dask.array as da import numpy as np @@ -69,7 +68,7 @@ class _CoordMetaData( Args: * defn: - The :class:`iris.coords.CoordDefn` metadata that represents a + The :class:`iris.common.CoordMetadata` metadata that represents a coordinate. * dims: @@ -86,7 +85,7 @@ class _CoordMetaData( """ - def __new__(cls, coord, dims): + def __new__(mcs, coord, dims): """ Create a new :class:`_CoordMetaData` instance. @@ -102,7 +101,7 @@ def __new__(cls, coord, dims): The new class instance. """ - defn = coord._as_defn() + defn = coord.metadata points_dtype = coord.points.dtype bounds_dtype = coord.bounds.dtype if coord.bounds is not None else None kwargs = {} @@ -121,7 +120,7 @@ def __new__(cls, coord, dims): order = _DECREASING kwargs["order"] = order metadata = super().__new__( - cls, defn, dims, points_dtype, bounds_dtype, kwargs + mcs, defn, dims, points_dtype, bounds_dtype, kwargs ) return metadata @@ -195,7 +194,7 @@ def __new__(cls, ancil, dims): The new class instance. """ - defn = ancil._as_defn() + defn = ancil.metadata metadata = super().__new__(cls, defn, dims) return metadata @@ -404,11 +403,11 @@ def __init__(self, cube): axes = dict(T=0, Z=1, Y=2, X=3) # Coordinate sort function - by guessed coordinate axis, then - # by coordinate definition, then by dimensions, in ascending order. + # by coordinate name, then by dimensions, in ascending order. def key_func(coord): return ( axes.get(guess_coord_axis(coord), len(axes) + 1), - coord._as_defn(), + coord.name(), cube.coord_dims(coord), ) @@ -423,7 +422,7 @@ def key_func(coord): self.scalar_coords.append(coord) def meta_key_func(dm): - return (dm._as_defn(), dm.cube_dims(cube)) + return (dm.metadata, dm.cube_dims(cube)) for cm in sorted(cube.cell_measures(), key=meta_key_func): dims = cube.cell_measure_dims(cm) @@ -991,6 +990,9 @@ def _build_aux_coordinates(self): points, bounds=bnds, **kwargs ) except ValueError: + # Ensure to remove the "circular" kwarg, which may be + # present in the defn of a DimCoord being demoted. + _ = kwargs.pop("circular", None) coord = iris.coords.AuxCoord( points, bounds=bnds, **kwargs ) diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 37daeec4aa..0f6a8ab6c6 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -131,7 +131,7 @@ def _coordless_match(self, cube): if self._name: # Require to also check against cube.name() for the fallback # "unknown" default case, when there is no name metadata available.
- match = self._name in cube.names or self._name == cube.name() + match = self._name in cube._names or self._name == cube.name() if match and self._cube_func: match = self._cube_func(cube) return match @@ -515,6 +515,7 @@ def __init__( match. Kwargs: + * standard_name: A string or callable representing the standard name to match against. @@ -534,6 +535,7 @@ def __init__( where the standard_name is not set, then use standard_name=None. Returns: + * Boolean Example usage:: @@ -544,8 +546,8 @@ def __init__( iris.NameConstraint(standard_name='air_temperature', STASH=lambda stash: stash.item == 203) - """ + self.standard_name = standard_name self.long_name = long_name self.var_name = var_name diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 9ea07e54b2..ed6dd784f2 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -22,8 +22,9 @@ is_lazy_data, multidim_lazy_stack, ) -import iris.cube import iris.coords +from iris.common import CoordMetadata, CubeMetadata +import iris.cube import iris.exceptions import iris.util @@ -115,7 +116,7 @@ class _ScalarCoordPayload( Args: * defns: - A list of scalar coordinate definitions :class:`iris.coords.CoordDefn` + A list of scalar coordinate metadata :class:`iris.common.CoordMetadata` belonging to a :class:`iris.cube.Cube`. * values: @@ -1478,9 +1479,7 @@ def axis_and_name(name): ) else: bounds = None - kwargs = dict( - zip(iris.coords.CoordDefn._fields, defns[name]) - ) + kwargs = dict(zip(CoordMetadata._fields, defns[name])) kwargs.update(metadata[name].kwargs) def name_in_independents(): @@ -1560,7 +1559,7 @@ def name_in_independents(): if bounds is not None: bounds[index] = name_value.bound - kwargs = dict(zip(iris.coords.CoordDefn._fields, defns[name])) + kwargs = dict(zip(CoordMetadata._fields, defns[name])) self._aux_templates.append( _Template(dims, points, bounds, kwargs) ) @@ -1594,7 +1593,7 @@ def _get_cube(self, data): (deepcopy(coord), dims) for coord, dims in self._aux_coords_and_dims ] - kwargs = dict(zip(iris.cube.CubeMetadata._fields, signature.defn)) + kwargs = dict(zip(CubeMetadata._fields, signature.defn)) cms_and_dims = [ (deepcopy(cm), dims) for cm, dims in self._cell_measures_and_dims @@ -1794,7 +1793,7 @@ def _extract_coord_payload(self, cube): # Coordinate sort function. # NB. This makes use of two properties which don't end up in - # the CoordDefn used by scalar_defns: `coord.points.dtype` and + # the metadata used by scalar_defns: `coord.points.dtype` and # `type(coord)`. def key_func(coord): points_dtype = coord.dtype @@ -1805,14 +1804,14 @@ def key_func(coord): axis_dict.get( iris.util.guess_coord_axis(coord), len(axis_dict) + 1 ), - coord._as_defn(), + coord.metadata, ) # Order the coordinates by hints, axis, and definition. for coord in sorted(coords, key=key_func): if not cube.coord_dims(coord) and coord.shape == (1,): # Extract the scalar coordinate data and metadata. - scalar_defns.append(coord._as_defn()) + scalar_defns.append(coord.metadata) # Because we know there's a single Cell in the # coordinate, it's quicker to roll our own than use # Coord.cell(). 
@@ -1844,14 +1843,14 @@ def key_func(coord): factory_defns = [] for factory in sorted( - cube.aux_factories, key=lambda factory: factory._as_defn() + cube.aux_factories, key=lambda factory: factory.metadata ): dependency_defns = [] dependencies = factory.dependencies for key in sorted(dependencies): coord = dependencies[key] if coord is not None: - dependency_defns.append((key, coord._as_defn())) + dependency_defns.append((key, coord.metadata)) factory_defn = _FactoryDefn(type(factory), dependency_defns) factory_defns.append(factory_defn) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 5b7dff813d..a1e56533fd 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -27,11 +27,11 @@ The gallery contains several interesting worked examples of how an :class:`~iris.analysis.Aggregator` may be used, including: - * :ref:`Meteorology-COP_1d_plot` - * :ref:`General-SOI_filtering` - * :ref:`Meteorology-hovmoller` - * :ref:`Meteorology-lagged_ensemble` - * :ref:`General-custom_aggregation` + * :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_1d.py` + * :ref:`sphx_glr_generated_gallery_general_plot_SOI_filtering.py` + * :ref:`sphx_glr_generated_gallery_meteorology_plot_hovmoller.py` + * :ref:`sphx_glr_generated_gallery_meteorology_plot_lagged_ensemble.py` + * :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py` """ @@ -319,7 +319,7 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): eq = ( other_coord is coord or other_coord.name() == coord.name() - and other_coord._as_defn() == coord._as_defn() + and other_coord.metadata == coord.metadata ) if eq: coord_to_add_to_group = other_coord @@ -487,7 +487,8 @@ def __init__( A variety of ready-made aggregators are provided in this module, such as :data:`~iris.analysis.MEAN` and :data:`~iris.analysis.MAX`. Custom aggregators can also be created for special purposes, see - :ref:`General-custom_aggregation` for a worked example. + :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py` + for a worked example. """ #: Cube cell method string. @@ -604,7 +605,7 @@ def update_metadata(self, cube, coords, **kwargs): Kwargs: - * This function is intended to be used in conjuction with aggregate() + * This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "ddof" keyword for a standard deviation aggregator). @@ -802,7 +803,9 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): # order cube. for point in points: cube = collapsed_cube.copy() - coord = iris.coords.AuxCoord(point, long_name=coord_name) + coord = iris.coords.AuxCoord( + point, long_name=coord_name, units="percent" + ) cube.add_aux_coord(coord) cubes.append(cube) @@ -980,7 +983,7 @@ def update_metadata(self, cube, coords, **kwargs): Kwargs: - * This function is intended to be used in conjuction with aggregate() + * This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "ddof" keyword for a standard deviation aggregator). diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index 06f44dc951..7ff5430ca6 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -4,10 +4,7 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
-import numpy as np - from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid -import iris import iris.experimental.regrid as eregrid diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index c7f084bc1b..261c93e8ef 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -147,11 +147,11 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): connected by wraparound. Input can be either two arrays, two coordinates, or a single cube - containing two suitable coordinates identified with the 'x' and'y' axes. + containing two suitable coordinates identified with the 'x' and 'y' axes. Args: - The inputs (x [,y]) can be any of the folliwing : + The inputs (x [,y]) can be any of the following : * x (:class:`~iris.cube.Cube`): a grid cube with 2D X and Y coordinates, identified by 'axis'. diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 0670c073ae..71584f04c0 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -426,8 +426,7 @@ def _get_horizontal_coord(cube, axis): if len(coords) != 1: raise ValueError( "Cube {!r} must contain a single 1D {} " - "coordinate.".format(cube.name()), - axis, + "coordinate.".format(cube.name(), axis) ) return coords[0] diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 0de97b02f3..3a38b3b283 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -10,22 +10,27 @@ from functools import lru_cache import inspect +import logging import math import operator import warnings import cf_units +import dask.array as da import numpy as np from numpy import ma import iris.analysis +from iris.common import SERVICES, Resolve +from iris.common.lenient import _lenient_client import iris.coords import iris.cube import iris.exceptions import iris.util -import dask.array as da -from dask.array.core import broadcast_shapes + +# Configure the logger. +logger = logging.getLogger(__name__) @lru_cache(maxsize=128, typed=True) @@ -115,7 +120,9 @@ def abs(cube, in_place=False): _assert_is_cube(cube) new_dtype = _output_dtype(np.abs, cube.dtype, in_place=in_place) op = da.absolute if cube.has_lazy_data() else np.abs - return _math_op_common(cube, op, cube.units, new_dtype, in_place=in_place) + return _math_op_common( + cube, op, cube.units, new_dtype=new_dtype, in_place=in_place + ) def intersection_of_cubes(cube, other_cube): @@ -179,43 +186,7 @@ def _assert_is_cube(cube): ) -def _assert_compatible(cube, other): - """ - Checks to see if cube.data and another array can be broadcast to - the same shape. - - """ - try: - new_shape = broadcast_shapes(cube.shape, other.shape) - except ValueError as err: - # re-raise - raise ValueError( - "The array was not broadcastable to the cube's data " - "shape. The error message when " - "broadcasting:\n{}\nThe cube's shape was {} and the " - "array's shape was {}".format(err, cube.shape, other.shape) - ) - - if cube.shape != new_shape: - raise ValueError( - "The array operation would increase the size or " - "dimensionality of the cube. 
The new cube's data " - "would have had to become: {}".format(new_shape) - ) - - -def _assert_matching_units(cube, other, operation_name): - """ - Check that the units of the cube and the other item are the same, or if - the other does not have a unit, skip this test - """ - if cube.units != getattr(other, "units", cube.units): - msg = "Cannot use {!r} with differing units ({} & {})".format( - operation_name, cube.units, other.units - ) - raise iris.exceptions.NotYetImplementedError(msg) - - +@_lenient_client(services=SERVICES) def add(cube, other, dim=None, in_place=False): """ Calculate the sum of two cubes, or the sum of a cube and a @@ -249,7 +220,10 @@ def add(cube, other, dim=None, in_place=False): """ _assert_is_cube(cube) new_dtype = _output_dtype( - operator.add, cube.dtype, _get_dtype(other), in_place=in_place + operator.add, + cube.dtype, + second_dtype=_get_dtype(other), + in_place=in_place, ) if in_place: _inplace_common_checks(cube, other, "addition") @@ -261,6 +235,7 @@ def add(cube, other, dim=None, in_place=False): ) +@_lenient_client(services=SERVICES) def subtract(cube, other, dim=None, in_place=False): """ Calculate the difference between two cubes, or the difference between @@ -294,7 +269,10 @@ def subtract(cube, other, dim=None, in_place=False): """ _assert_is_cube(cube) new_dtype = _output_dtype( - operator.sub, cube.dtype, _get_dtype(other), in_place=in_place + operator.sub, + cube.dtype, + second_dtype=_get_dtype(other), + in_place=in_place, ) if in_place: _inplace_common_checks(cube, other, "subtraction") @@ -335,30 +313,15 @@ def _add_subtract_common( """ _assert_is_cube(cube) - _assert_matching_units(cube, other, operation_name) - - if isinstance(other, iris.cube.Cube): - # get a coordinate comparison of this cube and the cube to do the - # operation with - coord_comp = iris.analysis._dimensional_metadata_comparison( - cube, other - ) - bad_coord_grps = ( - coord_comp["ungroupable_and_dimensioned"] - + coord_comp["resamplable"] + if cube.units != getattr(other, "units", cube.units): + emsg = ( + f"Cannot use {operation_name!r} with differing units " + f"({cube.units} & {other.units})" ) - if bad_coord_grps: - raise ValueError( - "This operation cannot be performed as there are " - "differing coordinates (%s) remaining " - "which cannot be ignored." - % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps}) - ) - else: - coord_comp = None + raise iris.exceptions.NotYetImplementedError(emsg) - new_cube = _binary_op_common( + result = _binary_op_common( operation_function, operation_name, cube, @@ -369,17 +332,10 @@ def _add_subtract_common( in_place=in_place, ) - if coord_comp: - # If a coordinate is to be ignored - remove it - ignore = filter( - None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]] - ) - for coord in ignore: - new_cube.remove_coord(coord) - - return new_cube + return result +@_lenient_client(services=SERVICES) def multiply(cube, other, dim=None, in_place=False): """ Calculate the product of a cube and another cube or coordinate. 
@@ -403,38 +359,23 @@ def multiply(cube, other, dim=None, in_place=False): """ _assert_is_cube(cube) + new_dtype = _output_dtype( - operator.mul, cube.dtype, _get_dtype(other), in_place=in_place + operator.mul, + cube.dtype, + second_dtype=_get_dtype(other), + in_place=in_place, ) other_unit = getattr(other, "units", "1") new_unit = cube.units * other_unit + if in_place: _inplace_common_checks(cube, other, "multiplication") op = operator.imul else: op = operator.mul - if isinstance(other, iris.cube.Cube): - # get a coordinate comparison of this cube and the cube to do the - # operation with - coord_comp = iris.analysis._dimensional_metadata_comparison( - cube, other - ) - bad_coord_grps = ( - coord_comp["ungroupable_and_dimensioned"] - + coord_comp["resamplable"] - ) - if bad_coord_grps: - raise ValueError( - "This operation cannot be performed as there are " - "differing coordinates (%s) remaining " - "which cannot be ignored." - % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps}) - ) - else: - coord_comp = None - - new_cube = _binary_op_common( + result = _binary_op_common( op, "multiply", cube, @@ -445,15 +386,7 @@ def multiply(cube, other, dim=None, in_place=False): in_place=in_place, ) - if coord_comp: - # If a coordinate is to be ignored - remove it - ignore = filter( - None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]] - ) - for coord in ignore: - new_cube.remove_coord(coord) - - return new_cube + return result def _inplace_common_checks(cube, other, math_op): @@ -475,6 +408,7 @@ def _inplace_common_checks(cube, other, math_op): ) +@_lenient_client(services=SERVICES) def divide(cube, other, dim=None, in_place=False): """ Calculate the division of a cube by a cube or coordinate. @@ -498,44 +432,29 @@ def divide(cube, other, dim=None, in_place=False): """ _assert_is_cube(cube) + new_dtype = _output_dtype( - operator.truediv, cube.dtype, _get_dtype(other), in_place=in_place + operator.truediv, + cube.dtype, + second_dtype=_get_dtype(other), + in_place=in_place, ) other_unit = getattr(other, "units", "1") new_unit = cube.units / other_unit + if in_place: if cube.dtype.kind in "iu": # Cannot coerce float result from inplace division back to int. - aemsg = ( - "Cannot perform inplace division of cube {!r} " + emsg = ( + f"Cannot perform inplace division of cube {cube.name()!r} " "with integer data." ) - raise ArithmeticError(aemsg) + raise ArithmeticError(emsg) op = operator.itruediv else: op = operator.truediv - if isinstance(other, iris.cube.Cube): - # get a coordinate comparison of this cube and the cube to do the - # operation with - coord_comp = iris.analysis._dimensional_metadata_comparison( - cube, other - ) - bad_coord_grps = ( - coord_comp["ungroupable_and_dimensioned"] - + coord_comp["resamplable"] - ) - if bad_coord_grps: - raise ValueError( - "This operation cannot be performed as there are " - "differing coordinates (%s) remaining " - "which cannot be ignored." 
- % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps}) - ) - else: - coord_comp = None - - new_cube = _binary_op_common( + result = _binary_op_common( op, "divide", cube, @@ -546,15 +465,7 @@ def divide(cube, other, dim=None, in_place=False): in_place=in_place, ) - if coord_comp: - # If a coordinate is to be ignored - remove it - ignore = filter( - None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]] - ) - for coord in ignore: - new_cube.remove_coord(coord) - - return new_cube + return result def exponentiate(cube, exponent, in_place=False): @@ -585,7 +496,10 @@ def exponentiate(cube, exponent, in_place=False): """ _assert_is_cube(cube) new_dtype = _output_dtype( - operator.pow, cube.dtype, _get_dtype(exponent), in_place=in_place + operator.pow, + cube.dtype, + second_dtype=_get_dtype(exponent), + in_place=in_place, ) if cube.has_lazy_data(): @@ -598,7 +512,11 @@ def power(data, out=None): return np.power(data, exponent, out) return _math_op_common( - cube, power, cube.units ** exponent, new_dtype, in_place=in_place + cube, + power, + cube.units ** exponent, + new_dtype=new_dtype, + in_place=in_place, ) @@ -628,7 +546,7 @@ def exp(cube, in_place=False): new_dtype = _output_dtype(np.exp, cube.dtype, in_place=in_place) op = da.exp if cube.has_lazy_data() else np.exp return _math_op_common( - cube, op, cf_units.Unit("1"), new_dtype, in_place=in_place + cube, op, cf_units.Unit("1"), new_dtype=new_dtype, in_place=in_place ) @@ -654,7 +572,11 @@ def log(cube, in_place=False): new_dtype = _output_dtype(np.log, cube.dtype, in_place=in_place) op = da.log if cube.has_lazy_data() else np.log return _math_op_common( - cube, op, cube.units.log(math.e), new_dtype, in_place=in_place + cube, + op, + cube.units.log(math.e), + new_dtype=new_dtype, + in_place=in_place, ) @@ -680,7 +602,7 @@ def log2(cube, in_place=False): new_dtype = _output_dtype(np.log2, cube.dtype, in_place=in_place) op = da.log2 if cube.has_lazy_data() else np.log2 return _math_op_common( - cube, op, cube.units.log(2), new_dtype, in_place=in_place + cube, op, cube.units.log(2), new_dtype=new_dtype, in_place=in_place ) @@ -706,12 +628,12 @@ def log10(cube, in_place=False): new_dtype = _output_dtype(np.log10, cube.dtype, in_place=in_place) op = da.log10 if cube.has_lazy_data() else np.log10 return _math_op_common( - cube, op, cube.units.log(10), new_dtype, in_place=in_place + cube, op, cube.units.log(10), new_dtype=new_dtype, in_place=in_place ) def apply_ufunc( - ufunc, cube, other_cube=None, new_unit=None, new_name=None, in_place=False + ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False ): """ Apply a `numpy universal function @@ -735,7 +657,7 @@ def apply_ufunc( Kwargs: - * other_cube: + * other: An instance of :class:`iris.cube.Cube` to be given as the second argument to :func:`numpy.ufunc`. 
@@ -758,51 +680,59 @@ def apply_ufunc( """ if not isinstance(ufunc, np.ufunc): - name = getattr(ufunc, "__name__", "function passed to apply_ufunc") - - raise TypeError( - "{} is not recognised (it is not an instance of " - "numpy.ufunc)".format(name) + ufunc_name = getattr( + ufunc, "__name__", "function passed to apply_ufunc" ) + emsg = f"{ufunc_name} is not recognised, it is not an instance of numpy.ufunc" + raise TypeError(emsg) + + ufunc_name = ufunc.__name__ if ufunc.nout != 1: - raise ValueError( - "{} returns {} objects, apply_ufunc currently " - "only supports ufunc functions returning a single " - "object.".format(ufunc.__name__, ufunc.nout) + emsg = ( + f"{ufunc_name} returns {ufunc.nout} objects, apply_ufunc currently " + "only supports numpy.ufunc functions returning a single object." ) + raise ValueError(emsg) - if ufunc.nin == 2: - if other_cube is None: - raise ValueError( - "{} requires two arguments, so other_cube " - "must also be passed to apply_ufunc".format(ufunc.__name__) + if ufunc.nin == 1: + if other is not None: + dmsg = ( + "ignoring surplus 'other' argument to apply_ufunc, " + f"provided ufunc {ufunc_name!r} only requires 1 input" ) + logger.debug(dmsg) - _assert_is_cube(other_cube) + new_dtype = _output_dtype(ufunc, cube.dtype, in_place=in_place) + + new_cube = _math_op_common( + cube, ufunc, new_unit, new_dtype=new_dtype, in_place=in_place + ) + elif ufunc.nin == 2: + if other is None: + emsg = ( + f"{ufunc_name} requires two arguments, another cube " + "must also be passed to apply_ufunc." + ) + raise ValueError(emsg) + + _assert_is_cube(other) new_dtype = _output_dtype( - ufunc, cube.dtype, other_cube.dtype, in_place=in_place + ufunc, cube.dtype, second_dtype=other.dtype, in_place=in_place ) new_cube = _binary_op_common( ufunc, - ufunc.__name__, + ufunc_name, cube, - other_cube, + other, new_unit, new_dtype=new_dtype, in_place=in_place, ) - - elif ufunc.nin == 1: - new_dtype = _output_dtype(ufunc, cube.dtype, in_place=in_place) - - new_cube = _math_op_common( - cube, ufunc, new_unit, new_dtype, in_place=in_place - ) - else: - raise ValueError(ufunc.__name__ + ".nin should be 1 or 2.") + emsg = f"Provided ufunc '{ufunc_name}.nin' must be 1 or 2." + raise ValueError(emsg) new_cube.rename(new_name) @@ -838,39 +768,63 @@ def _binary_op_common( `cube` and `cube.data` """ _assert_is_cube(cube) + + # Flag to notify the _math_op_common function to simply wrap the resultant + # data of the maths operation in a cube with no metadata. + skeleton_cube = False + if isinstance(other, iris.coords.Coord): - other = _broadcast_cube_coord_data(cube, other, operation_name, dim) + # The rhs must be an array. + rhs = _broadcast_cube_coord_data(cube, other, operation_name, dim=dim) elif isinstance(other, iris.cube.Cube): - try: - broadcast_shapes(cube.shape, other.shape) - except ValueError: - other = iris.util.as_compatible_shape(other, cube) - other = other.core_data() - else: - other = np.asanyarray(other) + # Prepare to resolve the cube operands and associated coordinate + # metadata into the resultant cube. + resolver = Resolve(cube, other) + + # Get the broadcast, auto-transposed safe versions of the cube operands. 
+ cube = resolver.lhs_cube_resolved + other = resolver.rhs_cube_resolved - # don't worry about checking for other data types (such as scalars or - # np.ndarrays) because _assert_compatible validates that they are broadcast - # compatible with cube.data - _assert_compatible(cube, other) + # Flag that it's safe to wrap the resultant data of the math operation + # in a cube with no metadata, as all of the metadata of the resultant + # cube is being managed by the resolver. + skeleton_cube = True - def unary_func(x): - ret = operation_function(x, other) - if ret is NotImplemented: - # explicitly raise the TypeError, so it gets raised even if, for + # The rhs must be an array. + rhs = other.core_data() + else: + # The rhs must be an array. + rhs = np.asanyarray(other) + + def unary_func(lhs): + data = operation_function(lhs, rhs) + if data is NotImplemented: + # Explicitly raise the TypeError, so it gets raised even if, for # example, `iris.analysis.maths.multiply(cube, other)` is called - # directly instead of `cube * other` - raise TypeError( - "cannot %s %r and %r objects" - % ( - operation_function.__name__, - type(x).__name__, - type(other).__name__, - ) + # directly instead of `cube * other`. + emsg = ( + f"Cannot {operation_function.__name__} {type(lhs).__name__!r} " + f"and {type(rhs).__name__} objects." ) - return ret + raise TypeError(emsg) + return data + + result = _math_op_common( + cube, + unary_func, + new_unit, + new_dtype=new_dtype, + in_place=in_place, + skeleton_cube=skeleton_cube, + ) - return _math_op_common(cube, unary_func, new_unit, new_dtype, in_place) + if isinstance(other, iris.cube.Cube): + # Insert the resultant data from the maths operation + # within the resolved cube. + result = resolver.cube(result.core_data(), in_place=in_place) + _sanitise_metadata(result, new_unit) + + return result def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): @@ -915,26 +869,64 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): return points +def _sanitise_metadata(cube, unit): + """ + As part of the maths metadata contract, clear the necessary or + unsupported metadata from the resultant cube of the maths operation. + + """ + # Clear the cube names. + cube.rename(None) + + # Clear the cube cell methods. + cube.cell_methods = None + + # Clear the cell measures. + for cm in cube.cell_measures(): + cube.remove_cell_measure(cm) + + # Clear the ancillary variables. + for av in cube.ancillary_variables(): + cube.remove_ancillary_variable(av) + + # Clear the STASH attribute, if present. + if "STASH" in cube.attributes: + del cube.attributes["STASH"] + + # Set the cube units. + cube.units = unit + + def _math_op_common( - cube, operation_function, new_unit, new_dtype=None, in_place=False + cube, + operation_function, + new_unit, + new_dtype=None, + in_place=False, + skeleton_cube=False, ): _assert_is_cube(cube) - if in_place: - new_cube = cube + if in_place and not skeleton_cube: if cube.has_lazy_data(): - new_cube.data = operation_function(cube.lazy_data()) + cube.data = operation_function(cube.lazy_data()) else: try: operation_function(cube.data, out=cube.data) except TypeError: - # Non ufunc function + # Non-ufunc function operation_function(cube.data) + new_cube = cube else: - new_cube = cube.copy(data=operation_function(cube.core_data())) + data = operation_function(cube.core_data()) + if skeleton_cube: + # Simply wrap the resultant data in a cube, as no + # cube metadata is required by the caller. 
+ new_cube = iris.cube.Cube(data) + else: + new_cube = cube.copy(data) - # If the result of the operation is scalar and masked, we need to fix up - # the dtype + # If the result of the operation is scalar and masked, we need to fix-up the dtype. if ( new_dtype is not None and not new_cube.has_lazy_data() @@ -943,8 +935,8 @@ def _math_op_common( ): new_cube.data = ma.masked_array(0, 1, dtype=new_dtype) - iris.analysis.clear_phenomenon_identity(new_cube) - new_cube.units = new_unit + _sanitise_metadata(new_cube, new_unit) + return new_cube @@ -965,12 +957,12 @@ def __init__(self, data_func, units_func): are given as positional arguments. Should return another data array, with the same shape as the first array. - Can also have keyword arguments. + May also have keyword arguments. * units_func: - Function to calculate the unit of the resulting cube. - Should take the cube(s) as input and return + Function to calculate the units of the resulting cube. + Should take the cube/s as input and return an instance of :class:`cf_units.Unit`. Returns: @@ -1008,6 +1000,22 @@ def ws_units_func(u_cube, v_cube): cs_cube = cs_ifunc(cube, axis=1) """ + self._data_func_name = getattr( + data_func, "__name__", "data_func argument passed to IFunc" + ) + + if not callable(data_func): + emsg = f"{self._data_func_name} is not callable." + raise TypeError(emsg) + + self._unit_func_name = getattr( + units_func, "__name__", "units_func argument passed to IFunc" + ) + + if not callable(units_func): + emsg = f"{self._unit_func_name} is not callable." + raise TypeError(emsg) + if hasattr(data_func, "nin"): self.nin = data_func.nin else: @@ -1023,39 +1031,38 @@ def ws_units_func(u_cube, v_cube): self.nin = len(args) if self.nin not in [1, 2]: - msg = ( - "{} requires {} input data arrays, the IFunc class " - "currently only supports functions requiring 1 or two " - "data arrays as input." + emsg = ( + f"{self._data_func_name} requires {self.nin} input data " + "arrays, the IFunc class currently only supports functions " + "requiring 1 or 2 data arrays as input." ) - raise ValueError(msg.format(data_func.__name__, self.nin)) + raise ValueError(emsg) if hasattr(data_func, "nout"): if data_func.nout != 1: - msg = ( - "{} returns {} objects, the IFunc class currently " - "only supports functions returning a single object." - ) - raise ValueError( - msg.format(data_func.__name__, data_func.nout) + emsg = ( + f"{self._data_func_name} returns {data_func.nout} objects, " + "the IFunc class currently only supports functions " + "returning a single object." 
) + raise ValueError(emsg) self.data_func = data_func - self.units_func = units_func def __repr__(self): - return "iris.analysis.maths.IFunc({}, {})".format( - self.data_func.__name__, self.units_func.__name__ + result = ( + f"iris.analysis.maths.IFunc({self._data_func_name}, " + f"{self._unit_func_name})" ) + return result def __str__(self): - return ( - "IFunc constructed from the data function {} " - "and the units function {}".format( - self.data_func.__name__, self.units_func.__name__ - ) + result = ( + f"IFunc constructed from the data function {self._data_func_name} " + f"and the units function {self._unit_func_name}" ) + return result def __call__( self, @@ -1105,11 +1112,27 @@ def wrap_data_func(*args, **kwargs): return self.data_func(*args, **kwargs_combined) - if self.nin == 2: + if self.nin == 1: + if other is not None: + dmsg = ( + "ignoring surplus 'other' argument to IFunc.__call__, " + f"provided data_func {self._data_func_name!r} only requires " + "1 input" + ) + logger.debug(dmsg) + + new_unit = self.units_func(cube) + + new_cube = _math_op_common( + cube, wrap_data_func, new_unit, in_place=in_place + ) + else: if other is None: - raise ValueError( - self.data_func.__name__ + " requires two arguments" + emsg = ( + f"{self._data_func_name} requires two arguments, another " + "cube must also be passed to IFunc.__call__." ) + raise ValueError(emsg) new_unit = self.units_func(cube, other) @@ -1123,21 +1146,6 @@ def wrap_data_func(*args, **kwargs): in_place=in_place, ) - elif self.nin == 1: - if other is not None: - raise ValueError( - self.data_func.__name__ + " requires one argument" - ) - - new_unit = self.units_func(cube) - - new_cube = _math_op_common( - cube, wrap_data_func, new_unit, in_place=in_place - ) - - else: - raise ValueError("self.nin should be 1 or 2.") - if new_name is not None: new_cube.rename(new_name) diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index ba3ed2504c..bb283a0e89 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -64,7 +64,7 @@ def pearsonr( correlation at each time/altitude point. Reference: - http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation + https://en.wikipedia.org/wiki/Pearson_correlation_coefficient This operation is non-lazy. diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 11148188fa..5b63ff53ed 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -14,7 +14,11 @@ import dask.array as da import numpy as np -from iris._cube_coord_common import CFVariableMixin +from iris.common import ( + CFVariableMixin, + CoordMetadata, + metadata_manager_factory, +) import iris.coords @@ -33,14 +37,40 @@ class AuxCoordFactory(CFVariableMixin, metaclass=ABCMeta): """ def __init__(self): + # Configure the metadata manager. + if not hasattr(self, "_metadata_manager"): + self._metadata_manager = metadata_manager_factory(CoordMetadata) + #: Descriptive name of the coordinate made by the factory self.long_name = None #: netCDF variable name for the coordinate made by the factory self.var_name = None - #: Coordinate system (if any) of the coordinate made by the factory self.coord_system = None + # See the climatological property getter. + self._metadata_manager.climatological = False + + @property + def coord_system(self): + """ + The coordinate-system (if any) of the coordinate made by the factory. 
+ + """ + return self._metadata_manager.coord_system + + @coord_system.setter + def coord_system(self, value): + self._metadata_manager.coord_system = value + + @property + def climatological(self): + """ + Always returns False, as a factory itself can never have points/bounds + and therefore can never be climatological by definition. + + """ + return self._metadata_manager.climatological @property @abstractmethod @@ -51,20 +81,6 @@ def dependencies(self): """ - def _as_defn(self): - defn = iris.coords.CoordDefn( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.coord_system, - # Slot for Coord 'climatological' property, which this - # doesn't have. - False, - ) - return defn - @abstractmethod def make_coord(self, coord_dims_func): """ @@ -372,6 +388,8 @@ def __init__(self, delta=None, sigma=None, orography=None): The coordinate providing the `orog` term. """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() if delta and delta.nbounds not in (0, 2): @@ -395,21 +413,24 @@ def __init__(self, delta=None, sigma=None, orography=None): self.standard_name = "altitude" if delta is None and orography is None: - raise ValueError( - "Unable to determine units: no delta or orography" - " available." + emsg = ( + "Unable to determine units: no delta or orography " + "available." ) + raise ValueError(emsg) if delta and orography and delta.units != orography.units: - raise ValueError( - "Incompatible units: delta and orography must" - " have the same units." + emsg = ( + "Incompatible units: delta and orography must have " + "the same units." ) + raise ValueError(emsg) self.units = (delta and delta.units) or orography.units if not self.units.is_convertible("m"): - raise ValueError( - "Invalid units: delta and/or orography" - " must be expressed in length units." + emsg = ( + "Invalid units: delta and/or orography must be expressed " + "in length units." ) + raise ValueError(emsg) self.attributes = {"positive": "up"} @property @@ -556,10 +577,13 @@ def __init__(self, delta=None, sigma=None, surface_air_pressure=None): The coordinate providing the `ps` term. """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() # Check that provided coords meet necessary conditions. self._check_dependencies(delta, sigma, surface_air_pressure) + self.units = (delta and delta.units) or surface_air_pressure.units self.delta = delta self.sigma = sigma @@ -568,20 +592,12 @@ def __init__(self, delta=None, sigma=None, surface_air_pressure=None): self.standard_name = "air_pressure" self.attributes = {} - @property - def units(self): - if self.delta is not None: - units = self.delta.units - else: - units = self.surface_air_pressure.units - return units - @staticmethod def _check_dependencies(delta, sigma, surface_air_pressure): # Check for sufficient coordinates. if delta is None and (sigma is None or surface_air_pressure is None): msg = ( - "Unable to contruct hybrid pressure coordinate factory " + "Unable to construct hybrid pressure coordinate factory " "due to insufficient source coordinates." 
) raise ValueError(msg) @@ -753,7 +769,7 @@ def __init__( zlev=None, ): """ - Creates a ocean sigma over z coordinate factory with the formula: + Creates an ocean sigma over z coordinate factory with the formula: if k < nsigma: z(n, k, j, i) = eta(n, j, i) + sigma(k) * @@ -766,10 +782,13 @@ def __init__( either `eta`, or 'sigma' and `depth` and `depth_c` coordinates. """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() # Check that provided coordinates meet necessary conditions. self._check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev) + self.units = zlev.units self.sigma = sigma self.eta = eta @@ -781,16 +800,12 @@ def __init__( self.standard_name = "sea_surface_height_above_reference_ellipsoid" self.attributes = {"positive": "up"} - @property - def units(self): - return self.zlev.units - @staticmethod def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): # Check for sufficient factory coordinates. if zlev is None: raise ValueError( - "Unable to determine units: " "no zlev coordinate available." + "Unable to determine units: no zlev coordinate available." ) if nsigma is None: raise ValueError("Missing nsigma coordinate.") @@ -957,7 +972,7 @@ def make_coord(self, coord_dims_func): Args: * coord_dims_func: - A callable which can return the list of dimesions relevant + A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. """ @@ -1068,10 +1083,13 @@ def __init__(self, sigma=None, eta=None, depth=None): (depth(j, i) + eta(n, j, i)) """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() # Check that provided coordinates meet necessary conditions. self._check_dependencies(sigma, eta, depth) + self.units = depth.units self.sigma = sigma self.eta = eta @@ -1080,10 +1098,6 @@ def __init__(self, sigma=None, eta=None, depth=None): self.standard_name = "sea_surface_height_above_reference_ellipsoid" self.attributes = {"positive": "up"} - @property - def units(self): - return self.depth.units - @staticmethod def _check_dependencies(sigma, eta, depth): # Check for sufficient factory coordinates. @@ -1252,10 +1266,13 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): S(k,j,i) = depth_c * s(k) + (depth(j,i) - depth_c) * C(k) """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() # Check that provided coordinates meet necessary conditions. self._check_dependencies(s, c, eta, depth, depth_c) + self.units = depth.units self.s = s self.c = c @@ -1266,10 +1283,6 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): self.standard_name = "sea_surface_height_above_reference_ellipsoid" self.attributes = {"positive": "up"} - @property - def units(self): - return self.depth.units - @staticmethod def _check_dependencies(s, c, eta, depth, depth_c): # Check for sufficient factory coordinates. @@ -1476,10 +1489,13 @@ def __init__( b * [tanh(a * (s(k) + 0.5)) / (2 * tanh(0.5*a)) - 0.5] """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() # Check that provided coordinates meet necessary conditions. 
self._check_dependencies(s, eta, depth, a, b, depth_c) + self.units = depth.units self.s = s self.eta = eta @@ -1491,10 +1507,6 @@ def __init__( self.standard_name = "sea_surface_height_above_reference_ellipsoid" self.attributes = {"positive": "up"} - @property - def units(self): - return self.depth.units - @staticmethod def _check_dependencies(s, eta, depth, a, b, depth_c): # Check for sufficient factory coordinates. @@ -1695,10 +1707,13 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): (depth_c + depth(j,i)) """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__() # Check that provided coordinates meet necessary conditions. self._check_dependencies(s, c, eta, depth, depth_c) + self.units = depth.units self.s = s self.c = c @@ -1709,10 +1724,6 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None): self.standard_name = "sea_surface_height_above_reference_ellipsoid" self.attributes = {"positive": "up"} - @property - def units(self): - return self.depth.units - @staticmethod def _check_dependencies(s, c, eta, depth, depth_c): # Check for sufficient factory coordinates. diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py new file mode 100644 index 0000000000..c540d81bc0 --- /dev/null +++ b/lib/iris/common/__init__.py @@ -0,0 +1,11 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + + +from .lenient import * +from .metadata import * +from .mixin import * +from .resolve import * diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py new file mode 100644 index 0000000000..802d854554 --- /dev/null +++ b/lib/iris/common/lenient.py @@ -0,0 +1,661 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +from collections.abc import Iterable +from contextlib import contextmanager +from copy import deepcopy +from functools import wraps +from inspect import getmodule +import threading + + +__all__ = [ + "LENIENT", + "Lenient", +] + + +#: Default _Lenient services global activation state. +_LENIENT_ENABLE_DEFAULT = True + +#: Default Lenient maths feature state. +_LENIENT_MATHS_DEFAULT = True + +#: Protected _Lenient internal non-client, non-service keys. +_LENIENT_PROTECTED = ("active", "enable") + + +def _lenient_client(*dargs, services=None): + """ + Decorator that allows a client function/method to declare at runtime that + it is executing and requires lenient behaviour from a previously registered + lenient service function/method. + + This decorator supports being called with no arguments e.g., + + @_lenient_client() + def func(): + pass + + This is equivalent to using it as a simple naked decorator e.g., + + @_lenient_client + def func(): + pass + + Alternatively, this decorator supports the lenient client explicitly + declaring the lenient services that it wishes to use e.g., + + @_lenient_client(services=(service1, service2, ...)) + def func(): + pass + + Args: + + * dargs (tuple of callable): + A tuple containing the callable lenient client function/method to be + wrapped by the decorator. This is automatically populated by Python + through the decorator interface. No argument needs to be provided + manually.
+ + Kwargs: + + * services (callable or str or iterable of callable/str): + Zero or more function/methods, or equivalent fully qualified string names, of + lenient service function/methods. + + Returns: + Closure wrapped function/method. + + """ + ndargs = len(dargs) + + if ndargs: + assert ( + ndargs == 1 + ), f"Invalid lenient client arguments, expecting 1, got {ndargs}." + assert callable( + dargs[0] + ), "Invalid lenient client argument, expecting a callable." + + assert not ( + ndargs and services + ), "Invalid lenient client, got both arguments and keyword arguments." + + if ndargs: + # The decorator has been used as a simple naked decorator. + (func,) = dargs + + @wraps(func) + def lenient_client_inner_naked(*args, **kwargs): + """ + Closure wrapper function to register the wrapped function/method + as active at runtime before executing it. + + """ + with _LENIENT.context(active=_qualname(func)): + result = func(*args, **kwargs) + return result + + result = lenient_client_inner_naked + else: + # The decorator has been called with zero or more explicit lenient services (or None). + if services is None: + services = () + + if isinstance(services, str) or not isinstance(services, Iterable): + services = (services,) + + def lenient_client_outer(func): + @wraps(func) + def lenient_client_inner(*args, **kwargs): + """ + Closure wrapper function to register the wrapped function/method + as active at runtime before executing it. + + """ + with _LENIENT.context(*services, active=_qualname(func)): + result = func(*args, **kwargs) + return result + + return lenient_client_inner + + result = lenient_client_outer + + return result + + +def _lenient_service(*dargs): + """ + Decorator that allows a function/method to declare that it supports lenient + behaviour as a service. + + Registration is at Python interpreter parse time. + + The decorator supports being called with no arguments e.g., + + @_lenient_service() + def func(): + pass + + This is equivalent to using it as a simple naked decorator e.g., + + @_lenient_service + def func(): + pass + + Args: + + * dargs (tuple of callable): + A tuple containing the callable lenient service function/method to be + wrapped by the decorator. This is automatically populated by Python + through the decorator interface. No argument needs to be provided + manually. + + Returns: + Closure wrapped function/method. + + """ + ndargs = len(dargs) + + if ndargs: + assert ( + ndargs == 1 + ), f"Invalid lenient service arguments, expecting 1, got {ndargs}." + assert callable( + dargs[0] + ), "Invalid lenient service argument, expecting a callable." + + if ndargs: + # The decorator has been used as a simple naked decorator. + # Thus the (single) argument is a function to be wrapped. + # We just register the argument function as a lenient service, and + # return it unchanged. + (func,) = dargs + + _LENIENT.register_service(func) + + # This decorator registers 'func': the func itself is unchanged. + result = func + + else: + # The decorator has been called with no arguments. + # Return a decorator, to apply to 'func' immediately following. + def lenient_service_outer(func): + _LENIENT.register_service(func) + + # Decorator registers 'func', but func itself is unchanged. + return func + + result = lenient_service_outer + + return result + + +def _qualname(func): + """ + Return the fully qualified function/method string name. + + Args: + + * func (callable): + Callable function/method. Non-callable arguments are simply + passed through. + + ..
note:: + Inherited methods will be qualified with the base class that + defines the method. + + """ + result = func + if callable(func): + module = getmodule(func) + result = f"{module.__name__}.{func.__qualname__}" + + return result + + +class Lenient(threading.local): + def __init__(self, **kwargs): + """ + A container for managing the run-time lenient features and options. + + Kwargs: + + * kwargs (dict): + Mapping of lenient key/value options to enable/disable. Note that, + only the lenient "maths" option is available, which controls + lenient/strict cube arithmetic. + + For example:: + + Lenient(maths=False) + + Note that, the values of these options are thread-specific. + + """ + # Configure the initial default lenient state. + self._init() + + if not kwargs: + # If not specified, set the default behaviour of the maths lenient feature. + kwargs = dict(maths=_LENIENT_MATHS_DEFAULT) + + # Configure the provided (or default) lenient features. + for feature, state in kwargs.items(): + self[feature] = state + + def __contains__(self, key): + return key in self.__dict__ + + def __getitem__(self, key): + if key not in self.__dict__: + cls = self.__class__.__name__ + emsg = f"Invalid {cls!r} option, got {key!r}." + raise KeyError(emsg) + return self.__dict__[key] + + def __repr__(self): + cls = self.__class__.__name__ + msg = f"{cls}(maths={self.__dict__['maths']!r})" + return msg + + def __setitem__(self, key, value): + cls = self.__class__.__name__ + + if key not in self.__dict__: + emsg = f"Invalid {cls!r} option, got {key!r}." + raise KeyError(emsg) + + if not isinstance(value, bool): + emsg = f"Invalid {cls!r} option {key!r} value, got {value!r}." + raise ValueError(emsg) + + self.__dict__[key] = value + # Toggle the (private) lenient behaviour. + _LENIENT.enable = value + + def _init(self): + """Configure the initial default lenient state.""" + # This is the only publicly supported lenient feature, i.e., cube arithmetic. + self.__dict__["maths"] = None + + @contextmanager + def context(self, **kwargs): + """ + Return a context manager which allows temporary modification of the + lenient option state within its scope. + + On entry to the context manager, all provided keyword arguments are + applied. On exit from the context manager, the previous lenient + option state is restored. + + For example:: + with iris.common.LENIENT.context(maths=False): + pass + + """ + + def configure_state(state): + for feature, value in state.items(): + self[feature] = value + + # Save the original state. + original_state = deepcopy(self.__dict__) + + # Configure the provided lenient features. + configure_state(kwargs) + + try: + yield + finally: + # Restore the original state. + self.__dict__.clear() + self._init() + configure_state(original_state) + + +############################################################################### + + +class _Lenient(threading.local): + def __init__(self, *args, **kwargs): + """ + A container for managing the run-time lenient services and client + options for pre-defined functions/methods. + + Args: + + * args (callable or str or iterable of callable/str): + A function/method or fully qualified string name of the function/method + acting as a lenient service. + + Kwargs: + + * kwargs (dict of callable/str or iterable of callable/str): + Mapping of lenient client function/method, or fully qualified string name + of the function/method, to one or more lenient service + function/methods or fully qualified string names of the function/methods.
+ + For example:: + + _Lenient(service1, service2, client1=service1, client2=(service1, service2)) + + Note that, the values of these options are thread-specific. + + """ + # The executing lenient client at runtime. + self.__dict__["active"] = None + # The global lenient services state activation switch. + self.__dict__["enable"] = _LENIENT_ENABLE_DEFAULT + + for service in args: + self.register_service(service) + + for client, services in kwargs.items(): + self.register_client(client, services) + + def __call__(self, func): + """ + Determine whether it is valid for the function/method to provide a + lenient service at runtime to the actively executing lenient client. + + Args: + + * func (callable or str): + A function/method or fully qualified string name of the function/method. + + Returns: + Boolean. + + """ + result = False + if self.__dict__["enable"]: + service = _qualname(func) + if service in self and self.__dict__[service]: + active = self.__dict__["active"] + if active is not None and active in self: + services = self.__dict__[active] + if isinstance(services, str) or not isinstance( + services, Iterable + ): + services = (services,) + result = service in services + return result + + def __contains__(self, name): + name = _qualname(name) + return name in self.__dict__ + + def __getattr__(self, name): + if name not in self.__dict__: + cls = self.__class__.__name__ + emsg = f"Invalid {cls!r} option, got {name!r}." + raise AttributeError(emsg) + return self.__dict__[name] + + def __getitem__(self, name): + name = _qualname(name) + if name not in self.__dict__: + cls = self.__class__.__name__ + emsg = f"Invalid {cls!r} option, got {name!r}." + raise KeyError(emsg) + return self.__dict__[name] + + def __repr__(self): + cls = self.__class__.__name__ + width = len(cls) + 1 + kwargs = [ + "{}={!r}".format(name, self.__dict__[name]) + for name in sorted(self.__dict__.keys()) + ] + joiner = ",\n{}".format(" " * width) + return "{}({})".format(cls, joiner.join(kwargs)) + + def __setitem__(self, name, value): + name = _qualname(name) + cls = self.__class__.__name__ + + if name not in self.__dict__: + emsg = f"Invalid {cls!r} option, got {name!r}." + raise KeyError(emsg) + + if name == "active": + value = _qualname(value) + if not isinstance(value, str) and value is not None: + emsg = f"Invalid {cls!r} option {name!r}, expected a registered {cls!r} client, got {value!r}." + raise ValueError(emsg) + self.__dict__[name] = value + elif name == "enable": + self.enable = value + else: + if isinstance(value, str) or callable(value): + value = (value,) + if isinstance(value, Iterable): + value = tuple([_qualname(item) for item in value]) + self.__dict__[name] = value + + @contextmanager + def context(self, *args, **kwargs): + """ + Return a context manager which allows temporary modification of + the lenient option state for the active thread. + + On entry to the context manager, all provided keyword arguments are + applied. On exit from the context manager, the previous lenient option + state is restored. + + For example:: + with iris._LENIENT.context(example_lenient_flag=False): + # ... code that expects some non-lenient behaviour + + .. note:: + iris._LENIENT.example_lenient_flag does not exist and is + provided only as an example. + + """ + + def update_client(client, services): + if client in self.__dict__: + existing_services = self.__dict__[client] + else: + existing_services = () + + self.__dict__[client] = tuple(set(existing_services + services)) + + # Save the original state. 
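+        # (a deep copy, so nested client/service tuples cannot leak changes back)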
+ original_state = deepcopy(self.__dict__) + + # Temporarily update the state with the kwargs first. + for name, value in kwargs.items(): + self[name] = value + + # Get the active client. + active = self.__dict__["active"] + + if args: + # Update the client with the provided services. + new_services = tuple([_qualname(arg) for arg in args]) + + if active is None: + # Ensure not to use "context" as the ephemeral name + # of the context manager runtime "active" lenient client, + # as this causes a namespace clash with this method + # i.e., _Lenient.context, via _Lenient.__getattr__ + active = "__context" + self.__dict__["active"] = active + self.__dict__[active] = new_services + else: + # Append provided services to any pre-existing services of the active client. + update_client(active, new_services) + else: + # Append previous ephemeral services (for non-specific client) to the active client. + if ( + active is not None + and active != "__context" + and "__context" in self.__dict__ + ): + new_services = self.__dict__["__context"] + update_client(active, new_services) + + try: + yield + finally: + # Restore the original state. + self.__dict__.clear() + self.__dict__.update(original_state) + + @property + def enable(self): + """Return the activation state of the lenient services.""" + return self.__dict__["enable"] + + @enable.setter + def enable(self, state): + """ + Set the activation state of the lenient services. + + Setting the state to `False` disables all lenient services, and + setting the state to `True` enables all lenient services. + + Args: + + * state (bool): + Activation state for lenient services. + + """ + if not isinstance(state, bool): + cls = self.__class__.__name__ + emsg = f"Invalid {cls!r} option 'enable', expected a {type(True)!r}, got {state!r}." + raise ValueError(emsg) + self.__dict__["enable"] = state + + def register_client(self, func, services, append=False): + """ + Add the provided mapping of lenient client function/method to + required lenient service function/methods. + + Args: + + * func (callable or str): + A client function/method or fully qualified string name of the + client function/method. + + * services (callable or str or iterable of callable/str): + One or more service function/methods or fully qualified string names + of the required service function/methods. + + Kwargs: + + * append (bool): + If True, append the lenient services to any pre-registered lenient + services for the provided lenient client. Default is False. + + """ + func = _qualname(func) + cls = self.__class__.__name__ + + if func in _LENIENT_PROTECTED: + emsg = ( + f"Cannot register {cls!r} client. " + f"Please rename your client to be something other than {func!r}." + ) + raise ValueError(emsg) + if isinstance(services, str) or not isinstance(services, Iterable): + services = (services,) + if not len(services): + emsg = f"Require at least one {cls!r} client service." + raise ValueError(emsg) + services = tuple([_qualname(service) for service in services]) + if append: + # The original provided service order is not significant. There is + # no requirement to preserve it, so it's safe to sort. + existing = self.__dict__[func] if func in self else () + services = tuple(sorted(set(existing) | set(services))) + self.__dict__[func] = services + + def register_service(self, func): + """ + Add the provided function/method as providing a lenient service and + activate it.
+ + Args: + + * func (callable or str): + A service function/method or fully qualified string name of the + service function/method. + + """ + func = _qualname(func) + if func in _LENIENT_PROTECTED: + cls = self.__class__.__name__ + emsg = ( + f"Cannot register {cls!r} service. " + f"Please rename your service to be something other than {func!r}." + ) + raise ValueError(emsg) + self.__dict__[func] = True + + def unregister_client(self, func): + """ + Remove the provided function/method as a lenient client that uses lenient services. + + Args: + + * func (callable or str): + A function/method or fully qualified string name of the function/method. + + """ + func = _qualname(func) + cls = self.__class__.__name__ + + if func in _LENIENT_PROTECTED: + emsg = f"Cannot unregister {cls!r} client, as {func!r} is a protected {cls!r} option." + raise ValueError(emsg) + + if func in self.__dict__: + value = self.__dict__[func] + if isinstance(value, bool): + emsg = f"Cannot unregister {cls!r} client, as {func!r} is not a valid {cls!r} client." + raise ValueError(emsg) + del self.__dict__[func] + else: + emsg = f"Cannot unregister unknown {cls!r} client {func!r}." + raise ValueError(emsg) + + def unregister_service(self, func): + """ + Remove the provided function/method as providing a lenient service. + + Args: + + * func (callable or str): + A function/method or fully qualified string name of the function/method. + + """ + func = _qualname(func) + cls = self.__class__.__name__ + + if func in _LENIENT_PROTECTED: + emsg = f"Cannot unregister {cls!r} service, as {func!r} is a protected {cls!r} option." + raise ValueError(emsg) + + if func in self.__dict__: + value = self.__dict__[func] + if not isinstance(value, bool): + emsg = f"Cannot unregister {cls!r} service, as {func!r} is not a valid {cls!r} service." + raise ValueError(emsg) + del self.__dict__[func] + else: + emsg = f"Cannot unregister unknown {cls!r} service {func!r}." + raise ValueError(emsg) + + +#: (Private) Instance that manages all Iris run-time lenient client and service options. +_LENIENT = _Lenient() + +#: (Public) Instance that manages all Iris run-time lenient features. +LENIENT = Lenient() diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py new file mode 100644 index 0000000000..af097ab4ec --- /dev/null +++ b/lib/iris/common/metadata.py @@ -0,0 +1,1477 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +from abc import ABCMeta +from collections import namedtuple +from collections.abc import Iterable, Mapping +from copy import deepcopy +from functools import wraps +import logging +import re + +import numpy as np +import numpy.ma as ma +from xxhash import xxh64_hexdigest + +from .lenient import _LENIENT +from .lenient import _lenient_service as lenient_service +from .lenient import _qualname as qualname + + +__all__ = [ + "SERVICES_COMBINE", + "SERVICES_DIFFERENCE", + "SERVICES_EQUAL", + "SERVICES", + "AncillaryVariableMetadata", + "BaseMetadata", + "CellMeasureMetadata", + "CoordMetadata", + "CubeMetadata", + "DimCoordMetadata", + "metadata_manager_factory", +] + + +# https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name +_TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""") + +# Configure the logger.
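+# Note: used by the metadata classes below to emit lenient/strict debug records.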
+logger = logging.getLogger(__name__) + + +def _hexdigest(value): + """ + Return a hexadecimal string hash representation of the provided value. + + Calculates a 64-bit non-cryptographic hash of the provided value, + and returns the hexdigest string representation of the calculated hash. + + """ + # Special case: deal with numpy arrays. + if ma.isMaskedArray(value): + parts = ( + value.shape, + xxh64_hexdigest(value.data), + xxh64_hexdigest(value.mask), + ) + value = str(parts) + elif isinstance(value, np.ndarray): + parts = (value.shape, xxh64_hexdigest(value)) + value = str(parts) + + try: + # Calculate single-shot hash to avoid allocating state on the heap. + result = xxh64_hexdigest(value) + except TypeError: + # xxhash expects a bytes-like object, so try hashing the + # string representation of the provided value instead, but + # also fold in the object type... + parts = (type(value), value) + result = xxh64_hexdigest(str(parts)) + + return result + + +class _NamedTupleMeta(ABCMeta): + """ + Meta-class to support the convenience of creating a namedtuple from + names/members of the metadata class hierarchy. + + """ + + def __new__(mcs, name, bases, namespace): + names = [] + + for base in bases: + if hasattr(base, "_fields"): + base_names = getattr(base, "_fields") + is_abstract = getattr( + base_names, "__isabstractmethod__", False + ) + if not is_abstract: + if (not isinstance(base_names, Iterable)) or isinstance( + base_names, str + ): + base_names = (base_names,) + names.extend(base_names) + + if "_members" in namespace and not getattr( + namespace["_members"], "__isabstractmethod__", False + ): + namespace_names = namespace["_members"] + + if (not isinstance(namespace_names, Iterable)) or isinstance( + namespace_names, str + ): + namespace_names = (namespace_names,) + + names.extend(namespace_names) + + if names: + item = namedtuple(f"{name}Namedtuple", names) + bases = list(bases) + # Influence the appropriate MRO. + bases.insert(0, item) + bases = tuple(bases) + + return super().__new__(mcs, name, bases, namespace) + + +class BaseMetadata(metaclass=_NamedTupleMeta): + """ + Container for common metadata. + + """ + + DEFAULT_NAME = "unknown" # the fall-back name for metadata identity + + _members = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + ) + + __slots__ = () + + @lenient_service + def __eq__(self, other): + """ + Determine whether the associated metadata members are equivalent. + + Args: + + * other (metadata): + A metadata instance of the same type. + + Returns: + Boolean. + + """ + result = NotImplemented + # Only perform equivalence with similar class instances. + if hasattr(other, "__class__") and other.__class__ is self.__class__: + if _LENIENT(self.__eq__) or _LENIENT(self.equal): + # Perform "lenient" equality. + logger.debug( + "lenient", extra=dict(cls=self.__class__.__name__) + ) + result = self._compare_lenient(other) + else: + # Perform "strict" equality. + logger.debug("strict", extra=dict(cls=self.__class__.__name__)) + + def func(field): + left = getattr(self, field) + right = getattr(other, field) + if self._is_attributes(field, left, right): + result = self._compare_strict_attributes(left, right) + else: + result = left == right + return result + + # Note that, for strict we use "_fields" not "_members". + # The "circular" member does not participate in strict equivalence.
fields = filter( + lambda field: field != "circular", self._fields + ) + result = all([func(field) for field in fields]) + + return result + + def __lt__(self, other): + # + # Support Python2 behaviour for a "<" operation involving a + # "NoneType" operand. + # + if not isinstance(other, self.__class__): + return NotImplemented + + def _sort_key(item): + keys = [] + for field in item._fields: + if field != "attributes": + value = getattr(item, field) + keys.extend((value is not None, value)) + return tuple(keys) + + return _sort_key(self) < _sort_key(other) + + def __ne__(self, other): + result = self.__eq__(other) + if result is not NotImplemented: + result = not result + + return result + + def _api_common( + self, other, func_service, func_operation, action, lenient=None + ): + """ + Common entry-point for lenient metadata API methods. + + Args: + + * other (metadata): + A metadata instance of the same type. + + * func_service (callable): + The parent service method offering the API entry-point to the service. + + * func_operation (callable): + The parent service method that provides the actual service. + + * action (str): + The verb describing the service operation. + + Kwargs: + + * lenient (boolean): + Enable/disable the lenient service operation. The default is to automatically + detect whether this lenient service operation is enabled. + + Returns: + The result of the service operation to the parent service caller. + + """ + # Ensure that we have similar class instances. + if ( + not hasattr(other, "__class__") + or other.__class__ is not self.__class__ + ): + emsg = "Cannot {} {!r} with {!r}." + raise TypeError( + emsg.format(action, self.__class__.__name__, type(other)) + ) + + if lenient is None: + result = func_operation(other) + else: + if lenient: + # Use qualname to disassociate from the instance bound method. + args, kwargs = (qualname(func_service),), dict() + else: + # Use qualname to guarantee that the instance bound method + # is a hashable key. + args, kwargs = (), {qualname(func_service): False} + + with _LENIENT.context(*args, **kwargs): + result = func_operation(other) + + return result + + def _combine(self, other): + """Perform associated metadata member combination.""" + if _LENIENT(self.combine): + # Perform "lenient" combine. + logger.debug("lenient", extra=dict(cls=self.__class__.__name__)) + values = self._combine_lenient(other) + else: + # Perform "strict" combine. + logger.debug("strict", extra=dict(cls=self.__class__.__name__)) + + def func(field): + left = getattr(self, field) + right = getattr(other, field) + if self._is_attributes(field, left, right): + result = self._combine_strict_attributes(left, right) + else: + result = left if left == right else None + return result + + # Note that, for strict we use "_fields" not "_members". + values = [func(field) for field in self._fields] + + return values + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members. + + Args: + + * other (BaseMetadata): + The other metadata participating in the lenient combination. + + Returns: + A list of combined metadata member values. + + """ + + def func(field): + left = getattr(self, field) + right = getattr(other, field) + result = None + if field == "units": + # Perform "strict" combination for "units".
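+                # (a unit mismatch has no sensible lenient fallback, so it combines to None)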
+ result = left if left == right else None + elif self._is_attributes(field, left, right): + result = self._combine_lenient_attributes(left, right) + else: + if left == right: + result = left + elif left is None: + result = right + elif right is None: + result = left + return result + + # Note that, we use "_members" not "_fields". + return [func(field) for field in BaseMetadata._members] + + @staticmethod + def _combine_lenient_attributes(left, right): + """Leniently combine the dictionary members together.""" + # Copy the dictionaries. + left = deepcopy(left) + right = deepcopy(right) + # Use xxhash to perform an extremely fast non-cryptographic hash of + # each dictionary key rvalue, thus ensuring that the dictionary is + # completely hashable, as required by a set. + sleft = {(k, _hexdigest(v)) for k, v in left.items()} + sright = {(k, _hexdigest(v)) for k, v in right.items()} + # Intersection of common items. + common = sleft & sright + # Items in sleft different from sright. + dsleft = dict(sleft - sright) + # Items in sright different from sleft. + dsright = dict(sright - sleft) + # Intersection of common item keys with different values. + keys = set(dsleft.keys()) & set(dsright.keys()) + # Remove (in-place) common item keys with different values. + [dsleft.pop(key) for key in keys] + [dsright.pop(key) for key in keys] + # Now bring the result together. + result = {k: left[k] for k, _ in common} + result.update({k: left[k] for k in dsleft.keys()}) + result.update({k: right[k] for k in dsright.keys()}) + + return result + + @staticmethod + def _combine_strict_attributes(left, right): + """Perform strict combination of the dictionary members.""" + # Copy the dictionaries. + left = deepcopy(left) + right = deepcopy(right) + # Use xxhash to perform an extremely fast non-cryptographic hash of + # each dictionary key rvalue, thus ensuring that the dictionary is + # completely hashable, as required by a set. + sleft = {(k, _hexdigest(v)) for k, v in left.items()} + sright = {(k, _hexdigest(v)) for k, v in right.items()} + # Intersection of common items. + common = sleft & sright + # Now bring the result together. + result = {k: left[k] for k, _ in common} + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members. + + Args: + + * other (BaseMetadata): + The other metadata participating in the lenient comparison. + + Returns: + Boolean. + + """ + result = False + + # Use the "name" method to leniently compare "standard_name", + # "long_name", and "var_name" in a well defined way. + if self.name() == other.name(): + + def func(field): + left = getattr(self, field) + right = getattr(other, field) + if field == "units": + # Perform "strict" compare for "units". + result = left == right + elif self._is_attributes(field, left, right): + result = self._compare_lenient_attributes(left, right) + else: + # Perform "lenient" compare for members. + result = (left == right) or left is None or right is None + return result + + # Note that, we use "_members" not "_fields". + # Lenient equality explicitly ignores the "var_name" member. 
+ result = all( + [ + func(field) + for field in BaseMetadata._members + if field != "var_name" + ] + ) + + return result + + @staticmethod + def _compare_lenient_attributes(left, right): + """Perform lenient compare between the dictionary members.""" + # Use xxhash to perform an extremely fast non-cryptographic hash of + # each dictionary key rvalue, thus ensuring that the dictionary is + # completely hashable, as required by a set. + sleft = {(k, _hexdigest(v)) for k, v in left.items()} + sright = {(k, _hexdigest(v)) for k, v in right.items()} + # Items in sleft different from sright. + dsleft = dict(sleft - sright) + # Items in sright different from sleft. + dsright = dict(sright - sleft) + # Intersection of common item keys with different values. + keys = set(dsleft.keys()) & set(dsright.keys()) + + return not bool(keys) + + @staticmethod + def _compare_strict_attributes(left, right): + """Perform strict compare between the dictionary members.""" + # Use xxhash to perform an extremely fast non-cryptographic hash of + # each dictionary key rvalue, thus ensuring that the dictionary is + # completely hashable, as required by a set. + sleft = {(k, _hexdigest(v)) for k, v in left.items()} + sright = {(k, _hexdigest(v)) for k, v in right.items()} + + return sleft == sright + + def _difference(self, other): + """Perform associated metadata member difference.""" + if _LENIENT(self.difference): + # Perform "lenient" difference. + logger.debug("lenient", extra=dict(cls=self.__class__.__name__)) + values = self._difference_lenient(other) + else: + # Perform "strict" difference. + logger.debug("strict", extra=dict(cls=self.__class__.__name__)) + + def func(field): + left = getattr(self, field) + right = getattr(other, field) + if self._is_attributes(field, left, right): + result = self._difference_strict_attributes(left, right) + else: + result = None if left == right else (left, right) + return result + + # Note that, for strict we use "_fields" not "_members". + values = [func(field) for field in self._fields] + + return values + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members. + + Args: + + * other (BaseMetadata): + The other metadata participating in the lenient difference. + + Returns: + A list of difference metadata member values. + + """ + + def func(field): + left = getattr(self, field) + right = getattr(other, field) + if field == "units": + # Perform "strict" difference for "units". + result = None if left == right else (left, right) + elif self._is_attributes(field, left, right): + result = self._difference_lenient_attributes(left, right) + else: + # Perform "lenient" difference for members. + result = ( + (left, right) + if left is not None and right is not None and left != right + else None + ) + return result + + # Note that, we use "_members" not "_fields". + return [func(field) for field in BaseMetadata._members] + + @staticmethod + def _difference_lenient_attributes(left, right): + """Perform lenient difference between the dictionary members.""" + # Use xxhash to perform an extremely fast non-cryptographic hash of + # each dictionary key rvalue, thus ensuring that the dictionary is + # completely hashable, as required by a set. + sleft = {(k, _hexdigest(v)) for k, v in left.items()} + sright = {(k, _hexdigest(v)) for k, v in right.items()} + # Items in sleft different from sright. + dsleft = dict(sleft - sright) + # Items in sright different from sleft. 
+ dsright = dict(sright - sleft) + # Intersection of common item keys with different values. + keys = set(dsleft.keys()) & set(dsright.keys()) + # Keep (in-place) common item keys with different values. + [dsleft.pop(key) for key in list(dsleft.keys()) if key not in keys] + [dsright.pop(key) for key in list(dsright.keys()) if key not in keys] + + if not bool(dsleft) and not bool(dsright): + result = None + else: + # Replace hash-rvalue with original rvalue. + dsleft = {k: left[k] for k in dsleft.keys()} + dsright = {k: right[k] for k in dsright.keys()} + result = (dsleft, dsright) + + return result + + @staticmethod + def _difference_strict_attributes(left, right): + """Perform strict difference between the dictionary members.""" + # Use xxhash to perform an extremely fast non-cryptographic hash of + # each dictionary key rvalue, thus ensuring that the dictionary is + # completely hashable, as required by a set. + sleft = {(k, _hexdigest(v)) for k, v in left.items()} + sright = {(k, _hexdigest(v)) for k, v in right.items()} + # Items in sleft different from sright. + dsleft = dict(sleft - sright) + # Items in sright different from sleft. + dsright = dict(sright - sleft) + + if not bool(dsleft) and not bool(dsright): + result = None + else: + # Replace hash-rvalue with original rvalue. + dsleft = {k: left[k] for k in dsleft.keys()} + dsright = {k: right[k] for k in dsright.keys()} + result = (dsleft, dsright) + + return result + + @staticmethod + def _is_attributes(field, left, right): + """Determine whether we have two 'attributes' dictionaries.""" + return ( + field == "attributes" + and isinstance(left, Mapping) + and isinstance(right, Mapping) + ) + + @lenient_service + def combine(self, other, lenient=None): + """ + Return a new metadata instance created by combining each of the + associated metadata members. + + Args: + + * other (metadata): + A metadata instance of the same type. + + Kwargs: + + * lenient (boolean): + Enable/disable lenient combination. The default is to automatically + detect whether this lenient operation is enabled. + + Returns: + Metadata instance. + + """ + result = self._api_common( + other, self.combine, self._combine, "combine", lenient=lenient + ) + return self.__class__(*result) + + @lenient_service + def difference(self, other, lenient=None): + """ + Return a new metadata instance created by performing a difference + comparison between each of the associated metadata members. + + A metadata member returned with a value of "None" indicates that there + is no difference between the members being compared. Otherwise, a tuple + of the different values is returned. + + Args: + + * other (metadata): + A metadata instance of the same type. + + Kwargs: + + * lenient (boolean): + Enable/disable lenient difference. The default is to automatically + detect whether this lenient operation is enabled. + + Returns: + Metadata instance of member differences or None. + + """ + result = self._api_common( + other, self.difference, self._difference, "differ", lenient=lenient + ) + result = ( + None + if all([item is None for item in result]) + else self.__class__(*result) + ) + return result + + @lenient_service + def equal(self, other, lenient=None): + """ + Determine whether the associated metadata members are equivalent. + + Args: + + * other (metadata): + A metadata instance of the same type. + + Kwargs: + + * lenient (boolean): + Enable/disable lenient equivalence. The default is to automatically + detect whether this lenient operation is enabled. + + Returns: + Boolean. 
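+
+        For example, assuming ``metadata`` and ``other`` are two metadata
+        instances of the same type::
+
+            metadata.equal(other)                # defer to the runtime lenient state
+            metadata.equal(other, lenient=True)  # force a lenient comparison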
+ + """ + result = self._api_common( + other, self.equal, self.__eq__, "compare", lenient=lenient + ) + return result + + @classmethod + def from_metadata(cls, other): + result = None + if isinstance(other, BaseMetadata): + if other.__class__ is cls: + result = other + else: + kwargs = {field: None for field in cls._fields} + fields = set(cls._fields) & set(other._fields) + for field in fields: + kwargs[field] = getattr(other, field) + result = cls(**kwargs) + return result + + def name(self, default=None, token=False): + """ + Returns a string name representing the identity of the metadata. + + First it tries standard name, then it tries the long name, then + the NetCDF variable name, before falling-back to a default value, + which itself defaults to the string 'unknown'. + + Kwargs: + + * default: + The fall-back string representing the default name. Defaults to + the string 'unknown'. + * token: + If True, ensures that the name returned satisfies the criteria for + the characters required by a valid NetCDF name. If it is not + possible to return a valid name, then a ValueError exception is + raised. Defaults to False. + + Returns: + String. + + """ + + def _check(item): + return self.token(item) if token else item + + default = self.DEFAULT_NAME if default is None else default + + result = ( + _check(self.standard_name) + or _check(self.long_name) + or _check(self.var_name) + or _check(default) + ) + + if token and result is None: + emsg = "Cannot retrieve a valid name token from {!r}" + raise ValueError(emsg.format(self)) + + return result + + @classmethod + def token(cls, name): + """ + Determine whether the provided name is a valid NetCDF name and thus + safe to represent a single parsable token. + + Args: + + * name: + The string name to verify + + Returns: + The provided name if valid, otherwise None. + + """ + if name is not None: + result = _TOKEN_PARSE.match(name) + name = result if result is None else name + + return name + + +class AncillaryVariableMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.coords.AncillaryVariableMetadata`. + + """ + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + +class CellMeasureMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.coords.CellMeasure`. + + """ + + _members = "measure" + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for cell measures. + + Args: + + * other (CellMeasureMetadata): + The other cell measure metadata participating in the lenient + combination. + + Returns: + A list of combined metadata member values. + + """ + # Perform "strict" combination for "measure". 
+ value = self.measure if self.measure == other.measure else None + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.append(value) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for cell measures. + + Args: + + * other (CellMeasureMetadata): + The other cell measure metadata participating in the lenient + comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for "measure". + result = self.measure == other.measure + if result: + # Perform lenient comparison of the other parent members. + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for cell measures. + + Args: + + * other (CellMeasureMetadata): + The other cell measure metadata participating in the lenient + difference. + + Returns: + A list of difference metadata member values. + + """ + # Perform "strict" difference for "measure". + value = ( + None + if self.measure == other.measure + else (self.measure, other.measure) + ) + # Perform lenient difference of the other parent members. + result = super()._difference_lenient(other) + result.append(value) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + +class CoordMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.coords.Coord`. + + """ + + _members = ("coord_system", "climatological") + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + # Convert a DimCoordMetadata instance to a CoordMetadata instance. + if ( + self.__class__ is CoordMetadata + and hasattr(other, "__class__") + and other.__class__ is DimCoordMetadata + ): + other = self.from_metadata(other) + return super().__eq__(other) + + def __lt__(self, other): + # + # Support Python2 behaviour for a "<" operation involving a + # "NoneType" operand. + # + if not isinstance(other, BaseMetadata): + return NotImplemented + + if other.__class__ is DimCoordMetadata: + other = self.from_metadata(other) + + if not isinstance(other, self.__class__): + return NotImplemented + + def _sort_key(item): + keys = [] + for field in item._fields: + if field not in ("attributes", "coord_system"): + value = getattr(item, field) + keys.extend((value is not None, value)) + return tuple(keys) + + return _sort_key(self) < _sort_key(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for coordinates. + + Args: + + * other (CoordMetadata): + The other coordinate metadata participating in the lenient + combination. + + Returns: + A list of combined metadata member values. + + """ + # Perform "strict" combination for "coord_system" and "climatological". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return left if left == right else None + + # Note that, we use "_members" not "_fields". 
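+        # (only this class's own members here; the parent handles the inherited fields next)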
+ values = [func(field) for field in CoordMetadata._members] + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.extend(values) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for coordinates. + + Args: + + * other (CoordMetadata): + The other coordinate metadata participating in the lenient + comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for "coord_system" and "climatological". + result = all( + [ + getattr(self, field) == getattr(other, field) + for field in CoordMetadata._members + ] + ) + if result: + # Perform lenient comparison of the other parent members. + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for coordinates. + + Args: + + * other (CoordMetadata): + The other coordinate metadata participating in the lenient + difference. + + Returns: + A list of difference metadata member values. + + """ + # Perform "strict" difference for "coord_system" and "climatological". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return None if left == right else (left, right) + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in CoordMetadata._members] + # Perform lenient difference of the other parent members. + result = super()._difference_lenient(other) + result.extend(values) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + # Convert a DimCoordMetadata instance to a CoordMetadata instance. + if ( + self.__class__ is CoordMetadata + and hasattr(other, "__class__") + and other.__class__ is DimCoordMetadata + ): + other = self.from_metadata(other) + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + # Convert a DimCoordMetadata instance to a CoordMetadata instance. + if ( + self.__class__ is CoordMetadata + and hasattr(other, "__class__") + and other.__class__ is DimCoordMetadata + ): + other = self.from_metadata(other) + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + # Convert a DimCoordMetadata instance to a CoordMetadata instance. + if ( + self.__class__ is CoordMetadata + and hasattr(other, "__class__") + and other.__class__ is DimCoordMetadata + ): + other = self.from_metadata(other) + return super().equal(other, lenient=lenient) + + +class CubeMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.cube.Cube`. + + """ + + _members = "cell_methods" + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def __lt__(self, other): + # + # Support Python2 behaviour for a "<" operation involving a + # "NoneType" operand. 
+ # + if not isinstance(other, self.__class__): + return NotImplemented + + def _sort_key(item): + keys = [] + for field in item._fields: + if field not in ("attributes", "cell_methods"): + value = getattr(item, field) + keys.extend((value is not None, value)) + return tuple(keys) + + return _sort_key(self) < _sort_key(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for cubes. + + Args: + + * other (CubeMetadata): + The other cube metadata participating in the lenient combination. + + Returns: + A list of combined metadata member values. + + """ + # Perform "strict" combination for "cell_methods". + value = ( + self.cell_methods + if self.cell_methods == other.cell_methods + else None + ) + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.append(value) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for cubes. + + Args: + + * other (CubeMetadata): + The other cube metadata participating in the lenient comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for "cell_methods". + result = self.cell_methods == other.cell_methods + if result: + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for cubes. + + Args: + + * other (CubeMetadata): + The other cube metadata participating in the lenient difference. + + Returns: + A list of difference metadata member values. + + """ + # Perform "strict" difference for "cell_methods". + value = ( + None + if self.cell_methods == other.cell_methods + else (self.cell_methods, other.cell_methods) + ) + # Perform lenient difference of the other parent members. + result = super()._difference_lenient(other) + result.append(value) + + return result + + @property + def _names(self): + """ + A tuple containing the value of each name participating in the identity + of a :class:`iris.cube.Cube`. This includes the standard name, + long name, NetCDF variable name, and the STASH from the attributes + dictionary. + + """ + standard_name = self.standard_name + long_name = self.long_name + var_name = self.var_name + + # Defensive enforcement of attributes being a dictionary. + if not isinstance(self.attributes, Mapping): + try: + self.attributes = dict() + except AttributeError: + emsg = "Invalid '{}.attributes' member, must be a mapping." + raise AttributeError(emsg.format(self.__class__.__name__)) + + stash_name = self.attributes.get("STASH") + if stash_name is not None: + stash_name = str(stash_name) + + return standard_name, long_name, var_name, stash_name + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + @wraps(BaseMetadata.name) + def name(self, default=None, token=False): + def _check(item): + return self.token(item) if token else item + + default = self.DEFAULT_NAME if default is None else default + + # Defensive enforcement of attributes being a dictionary. 
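+        # (assignment fails on an immutable namedtuple instance, raising the AttributeError handled below)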
if not isinstance(self.attributes, Mapping): + try: + self.attributes = dict() + except AttributeError: + emsg = "Invalid '{}.attributes' member, must be a mapping." + raise AttributeError(emsg.format(self.__class__.__name__)) + + result = ( + _check(self.standard_name) + or _check(self.long_name) + or _check(self.var_name) + or _check(str(self.attributes.get("STASH", ""))) + or _check(default) + ) + + if token and result is None: + emsg = "Cannot retrieve a valid name token from {!r}" + raise ValueError(emsg.format(self)) + + return result + + +class DimCoordMetadata(CoordMetadata): + """ + Metadata container for a :class:`~iris.coords.DimCoord`. + + """ + + # The "circular" member is stateful only, and does not participate + # in lenient/strict equivalence. + _members = ("circular",) + + __slots__ = () + + @wraps(CoordMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + # Convert a CoordMetadata instance to a DimCoordMetadata instance. + if hasattr(other, "__class__") and other.__class__ is CoordMetadata: + other = self.from_metadata(other) + return super().__eq__(other) + + def __lt__(self, other): + # + # Support Python2 behaviour for a "<" operation involving a + # "NoneType" operand. + # + if not isinstance(other, BaseMetadata): + return NotImplemented + + if other.__class__ is CoordMetadata: + other = self.from_metadata(other) + + if not isinstance(other, self.__class__): + return NotImplemented + + def _sort_key(item): + keys = [] + for field in item._fields: + if field not in ("attributes", "coord_system"): + value = getattr(item, field) + keys.extend((value is not None, value)) + return tuple(keys) + + return _sort_key(self) < _sort_key(other) + + @wraps(CoordMetadata._combine_lenient, assigned=("__doc__",), updated=()) + def _combine_lenient(self, other): + # Perform "strict" combination for "circular". + value = self.circular if self.circular == other.circular else None + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.append(value) + + return result + + @wraps(CoordMetadata._compare_lenient, assigned=("__doc__",), updated=()) + def _compare_lenient(self, other): + # The "circular" member is not part of lenient equivalence. + return super()._compare_lenient(other) + + @wraps( + CoordMetadata._difference_lenient, assigned=("__doc__",), updated=() + ) + def _difference_lenient(self, other): + # Perform "strict" difference for "circular". + value = ( + None + if self.circular == other.circular + else (self.circular, other.circular) + ) + # Perform lenient difference of the other parent members. + result = super()._difference_lenient(other) + result.append(value) + + return result + + @wraps(CoordMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + # Convert a CoordMetadata instance to a DimCoordMetadata instance. + if hasattr(other, "__class__") and other.__class__ is CoordMetadata: + other = self.from_metadata(other) + return super().combine(other, lenient=lenient) + + @wraps(CoordMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+ if hasattr(other, "__class__") and other.__class__ is CoordMetadata: + other = self.from_metadata(other) + return super().difference(other, lenient=lenient) + + @wraps(CoordMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + # Convert a CoordMetadata instance to a DimCoordMetadata instance. + if hasattr(other, "__class__") and other.__class__ is CoordMetadata: + other = self.from_metadata(other) + return super().equal(other, lenient=lenient) + + +def metadata_manager_factory(cls, **kwargs): + """ + A class instance factory function responsible for manufacturing + metadata instances dynamically at runtime. + + The instances returned by the factory are capable of managing + their metadata state, which can be proxied by the owning container. + + Args: + + * cls: + A subclass of :class:`~iris.common.metadata.BaseMetadata`, defining + the metadata to be managed. + + Kwargs: + + * kwargs: + Initial values for the manufactured metadata instance. Unspecified + fields will default to a value of 'None'. + + """ + + def __init__(self, cls, **kwargs): + # Restrict to only dealing with appropriate metadata classes. + if not issubclass(cls, BaseMetadata): + emsg = "Require a subclass of {!r}, got {!r}." + raise TypeError(emsg.format(BaseMetadata.__name__, cls)) + + #: The metadata class to be manufactured by this factory. + self.cls = cls + + # Initialise the metadata class fields in the instance. + for field in self.fields: + setattr(self, field, None) + + # Populate with provided kwargs, which have already been verified + # by the factory. + for field, value in kwargs.items(): + setattr(self, field, value) + + def __eq__(self, other): + if not hasattr(other, "cls"): + return NotImplemented + match = self.cls is other.cls + if match: + match = self.values == other.values + + return match + + def __getstate__(self): + """Return the instance state to be pickled.""" + return {field: getattr(self, field) for field in self.fields} + + def __ne__(self, other): + match = self.__eq__(other) + if match is not NotImplemented: + match = not match + + return match + + def __reduce__(self): + """ + Classes created dynamically at runtime cannot be pickled, as they are + not defined at the top level of a module. As a result, we need to + use the __reduce__ interface to allow 'pickle' to recreate this class + instance, and to dump and load instance state successfully. + + """ + return metadata_manager_factory, (self.cls,), self.__getstate__() + + def __repr__(self): + args = ", ".join( + [ + "{}={!r}".format(field, getattr(self, field)) + for field in self.fields + ] + ) + return "{}({})".format(self.__class__.__name__, args) + + def __setstate__(self, state): + """Set the instance state when unpickling.""" + for field, value in state.items(): + setattr(self, field, value) + + @property + def fields(self): + """Return the names of the metadata members.""" + # Proxy for built-in namedtuple._fields property. + return self.cls._fields + + @property + def values(self): + fields = {field: getattr(self, field) for field in self.fields} + return self.cls(**fields) + + # Restrict factory to appropriate metadata classes only. + if not issubclass(cls, BaseMetadata): + emsg = "Require a subclass of {!r}, got {!r}." + raise TypeError(emsg.format(BaseMetadata.__name__, cls)) + + # Check whether kwargs have valid fields for the specified metadata.
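+    # e.g., standard_name="air_temperature" is a valid field for CubeMetadata,
+    # whereas measure="area" is not, and would be rejected below.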
+ if kwargs: + extra = [field for field in kwargs.keys() if field not in cls._fields] + if extra: + bad = ", ".join(map(lambda field: "{!r}".format(field), extra)) + emsg = "Invalid {!r} field parameters, got {}." + raise ValueError(emsg.format(cls.__name__, bad)) + + # Define the name, (inheritance) bases and namespace of the dynamic class. + name = "MetadataManager" + bases = () + namespace = { + "DEFAULT_NAME": cls.DEFAULT_NAME, + "__init__": __init__, + "__eq__": __eq__, + "__getstate__": __getstate__, + "__ne__": __ne__, + "__reduce__": __reduce__, + "__repr__": __repr__, + "__setstate__": __setstate__, + "fields": fields, + "name": cls.name, + "token": cls.token, + "values": values, + } + + # Account for additional "CubeMetadata" specialised class behaviour. + if cls is CubeMetadata: + namespace["_names"] = cls._names + + # Dynamically create the class. + Metadata = type(name, bases, namespace) + # Now manufacture an instance of that class. + metadata = Metadata(cls, **kwargs) + + return metadata + + +#: Convenience collection of lenient metadata combine services. +SERVICES_COMBINE = ( + AncillaryVariableMetadata.combine, + BaseMetadata.combine, + CellMeasureMetadata.combine, + CoordMetadata.combine, + CubeMetadata.combine, + DimCoordMetadata.combine, +) + + +#: Convenience collection of lenient metadata difference services. +SERVICES_DIFFERENCE = ( + AncillaryVariableMetadata.difference, + BaseMetadata.difference, + CellMeasureMetadata.difference, + CoordMetadata.difference, + CubeMetadata.difference, + DimCoordMetadata.difference, +) + + +#: Convenience collection of lenient metadata equality services. +SERVICES_EQUAL = ( + AncillaryVariableMetadata.__eq__, + AncillaryVariableMetadata.equal, + BaseMetadata.__eq__, + BaseMetadata.equal, + CellMeasureMetadata.__eq__, + CellMeasureMetadata.equal, + CoordMetadata.__eq__, + CoordMetadata.equal, + CubeMetadata.__eq__, + CubeMetadata.equal, + DimCoordMetadata.__eq__, + DimCoordMetadata.equal, +) + + +#: Convenience collection of lenient metadata services. +SERVICES = SERVICES_COMBINE + SERVICES_DIFFERENCE + SERVICES_EQUAL diff --git a/lib/iris/_cube_coord_common.py b/lib/iris/common/mixin.py similarity index 51% rename from lib/iris/_cube_coord_common.py rename to lib/iris/common/mixin.py index 541780ca15..50ef561036 100644 --- a/lib/iris/_cube_coord_common.py +++ b/lib/iris/common/mixin.py @@ -5,43 +5,20 @@ # licensing details. -from collections import namedtuple +from collections.abc import Mapping +from functools import wraps import re import cf_units +from iris.common import BaseMetadata import iris.std_names -# https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name -_TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""") +__all__ = ["CFVariableMixin"] -class Names( - namedtuple("Names", ["standard_name", "long_name", "var_name", "STASH"]) -): - """ - Immutable container for name metadata. - - Args: - - * standard_name: - A string representing the CF Conventions and Metadata standard name, or - None. - * long_name: - A string representing the CF Conventions and Metadata long name, or - None - * var_name: - A string representing the associated NetCDF variable name, or None. - * STASH: - A string representing the `~iris.fileformats.pp.STASH` code, or None. 
- - """ - - __slots__ = () - - -def get_valid_standard_name(name): +def _get_valid_standard_name(name): # Standard names are optionally followed by a standard name # modifier, separated by one or more blank spaces @@ -100,7 +77,7 @@ def __init__(self, *args, **kwargs): # Check validity of keys for key in self.keys(): if key in self._forbidden_keys: - raise ValueError("%r is not a permitted attribute" % key) + raise ValueError(f"{key!r} is not a permitted attribute") def __eq__(self, other): # Extend equality to allow for NumPy arrays. @@ -121,7 +98,7 @@ def __ne__(self, other): def __setitem__(self, key, value): if key in self._forbidden_keys: - raise ValueError("%r is not a permitted attribute" % key) + raise ValueError(f"{key!r} is not a permitted attribute") dict.__setitem__(self, key, value) def update(self, other, **kwargs): @@ -137,92 +114,15 @@ def update(self, other, **kwargs): # Check validity of keys for key in keys: if key in self._forbidden_keys: - raise ValueError("%r is not a permitted attribute" % key) + raise ValueError(f"{key!r} is not a permitted attribute") dict.update(self, other, **kwargs) class CFVariableMixin: - - _DEFAULT_NAME = "unknown" # the name default string - - @staticmethod - def token(name): - """ - Determine whether the provided name is a valid NetCDF name and thus - safe to represent a single parsable token. - - Args: - - * name: - The string name to verify - - Returns: - The provided name if valid, otherwise None. - - """ - if name is not None: - result = _TOKEN_PARSE.match(name) - name = result if result is None else name - return name - - def name(self, default=None, token=False): - """ - Returns a human-readable name. - - First it tries :attr:`standard_name`, then 'long_name', then - 'var_name', then the STASH attribute before falling back to - the value of `default` (which itself defaults to 'unknown'). - - Kwargs: - - * default: - The value of the default name. - * token: - If true, ensure that the name returned satisfies the criteria for - the characters required by a valid NetCDF name. If it is not - possible to return a valid name, then a ValueError exception is - raised. - - Returns: - String. - - """ - - def _check(item): - return self.token(item) if token else item - - default = self._DEFAULT_NAME if default is None else default - - result = ( - _check(self.standard_name) - or _check(self.long_name) - or _check(self.var_name) - or _check(str(self.attributes.get("STASH", ""))) - or _check(default) - ) - - if token and result is None: - emsg = "Cannot retrieve a valid name token from {!r}" - raise ValueError(emsg.format(self)) - - return result - - @property - def names(self): - """ - A tuple containing all of the metadata names. This includes the - standard name, long name, NetCDF variable name, and attributes - STASH name. 
- - """ - standard_name = self.standard_name - long_name = self.long_name - var_name = self.var_name - stash_name = self.attributes.get("STASH") - if stash_name is not None: - stash_name = str(stash_name) - return Names(standard_name, long_name, var_name, stash_name) + @wraps(BaseMetadata.name) + def name(self, default=None, token=None): + return self._metadata_manager.name(default=default, token=token) def rename(self, name): """ @@ -245,40 +145,99 @@ def rename(self, name): @property def standard_name(self): - """The standard name for the Cube's data.""" - return self._standard_name + """The CF Metadata standard name for the object.""" + return self._metadata_manager.standard_name @standard_name.setter def standard_name(self, name): - self._standard_name = get_valid_standard_name(name) + self._metadata_manager.standard_name = _get_valid_standard_name(name) @property - def units(self): - """The :mod:`~cf_units.Unit` instance of the object.""" - return self._units + def long_name(self): + """The CF Metadata long name for the object.""" + return self._metadata_manager.long_name - @units.setter - def units(self, unit): - self._units = cf_units.as_unit(unit) + @long_name.setter + def long_name(self, name): + self._metadata_manager.long_name = name @property def var_name(self): - """The netCDF variable name for the object.""" - return self._var_name + """The NetCDF variable name for the object.""" + return self._metadata_manager.var_name @var_name.setter def var_name(self, name): if name is not None: - result = self.token(name) + result = self._metadata_manager.token(name) if result is None or not name: emsg = "{!r} is not a valid NetCDF variable name." raise ValueError(emsg.format(name)) - self._var_name = name + self._metadata_manager.var_name = name + + @property + def units(self): + """The S.I. unit of the object.""" + return self._metadata_manager.units + + @units.setter + def units(self, unit): + self._metadata_manager.units = cf_units.as_unit(unit) @property def attributes(self): - return self._attributes + return self._metadata_manager.attributes @attributes.setter def attributes(self, attributes): - self._attributes = LimitedAttributeDict(attributes or {}) + self._metadata_manager.attributes = LimitedAttributeDict( + attributes or {} + ) + + @property + def metadata(self): + return self._metadata_manager.values + + @metadata.setter + def metadata(self, metadata): + cls = self._metadata_manager.cls + fields = self._metadata_manager.fields + arg = metadata + + try: + # Try dict-like initialisation... + metadata = cls(**metadata) + except TypeError: + try: + # Try iterator/namedtuple-like initialisation... + metadata = cls(*metadata) + except TypeError: + if hasattr(metadata, "_asdict"): + metadata = metadata._asdict() + + if isinstance(metadata, Mapping): + fields = [field for field in fields if field in metadata] + else: + # Generic iterable/container with no associated keys. + missing = [ + field + for field in fields + if not hasattr(metadata, field) + ] + + if missing: + missing = ", ".join( + map(lambda i: "{!r}".format(i), missing) + ) + emsg = "Invalid {!r} metadata, require {} to be specified." + raise TypeError(emsg.format(type(arg), missing)) + + for field in fields: + if hasattr(metadata, field): + value = getattr(metadata, field) + else: + value = metadata[field] + + # Ensure to always set state through the individual mixin/container + # setter functions. 
+ setattr(self, field, value) diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py new file mode 100644 index 0000000000..7098eaa65e --- /dev/null +++ b/lib/iris/common/resolve.py @@ -0,0 +1,1542 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +from collections import namedtuple +from collections.abc import Iterable +import logging + +from dask.array.core import broadcast_shapes +import numpy as np + +from iris.common import LENIENT + + +__all__ = ["Resolve"] + + +# Configure the logger. +logger = logging.getLogger(__name__) + + +_AuxCoverage = namedtuple( + "AuxCoverage", + [ + "cube", + "common_items_aux", + "common_items_scalar", + "local_items_aux", + "local_items_scalar", + "dims_common", + "dims_local", + "dims_free", + ], +) + +_CategoryItems = namedtuple( + "CategoryItems", ["items_dim", "items_aux", "items_scalar"], +) + +_DimCoverage = namedtuple( + "DimCoverage", + ["cube", "metadata", "coords", "dims_common", "dims_local", "dims_free"], +) + +_Item = namedtuple("Item", ["metadata", "coord", "dims"]) + +_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"]) + +_PreparedItem = namedtuple( + "PreparedItem", ["metadata", "points", "bounds", "dims", "container"], +) + +_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"]) + + +class Resolve: + def __init__(self, lhs=None, rhs=None): + if lhs is not None or rhs is not None: + self(lhs, rhs) + + def __call__(self, lhs, rhs): + self._init(lhs, rhs) + + self._metadata_resolve() + self._metadata_coverage() + + if self._debug: + self._debug_items(self.lhs_cube_category_local, title="LHS local") + self._debug_items(self.rhs_cube_category_local, title="RHS local") + self._debug_items(self.category_common, title="common") + logger.debug(f"map_rhs_to_lhs={self.map_rhs_to_lhs}") + + self._metadata_mapping() + self._metadata_prepare() + + def _as_compatible_cubes(self): + from iris.cube import Cube + + src_cube = self._src_cube + tgt_cube = self._tgt_cube + + # Use the mapping to calculate the new src cube shape. + new_src_shape = [1] * tgt_cube.ndim + for src_dim, tgt_dim in self.mapping.items(): + new_src_shape[tgt_dim] = src_cube.shape[src_dim] + new_src_shape = tuple(new_src_shape) + dmsg = ( + f"new src {self._src_cube_position} cube shape {new_src_shape}, " + f"actual shape {src_cube.shape}" + ) + logger.debug(dmsg) + + try: + # Determine whether the tgt cube shape and proposed new src + # cube shape will successfully broadcast together. + self._broadcast_shape = broadcast_shapes( + tgt_cube.shape, new_src_shape + ) + except ValueError: + emsg = ( + "Cannot resolve cubes, as a suitable transpose of the " + f"{self._src_cube_position} cube {src_cube.name()!r} " + f"will not broadcast with the {self._tgt_cube_position} cube " + f"{tgt_cube.name()!r}." + ) + raise ValueError(emsg) + + new_src_data = src_cube.core_data().copy() + + # Use the mapping to determine the transpose sequence of + # src dimensions in increasing tgt dimension order. + order = [ + src_dim + for src_dim, tgt_dim in sorted( + self.mapping.items(), key=lambda pair: pair[1] + ) + ] + + # Determine whether a transpose of the src cube is necessary. + if order != sorted(order): + new_src_data = new_src_data.transpose(order) + logger.debug( + f"transpose src {self._src_cube_position} cube with order {order}" + ) + + # Determine whether a reshape is necessary. 
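The shape arithmetic in `_as_compatible_cubes` above can be sanity-checked in isolation; e.g. mapping a `(3, 4)` src cube onto dimensions 1 and 2 of a `(2, 3, 4)` tgt cube::

    from dask.array.core import broadcast_shapes

    mapping, src_shape, tgt_shape = {0: 1, 1: 2}, (3, 4), (2, 3, 4)
    new_src_shape = [1] * len(tgt_shape)
    for src_dim, tgt_dim in mapping.items():
        new_src_shape[tgt_dim] = src_shape[src_dim]
    broadcast_shapes(tgt_shape, tuple(new_src_shape))  # -> (2, 3, 4)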
+ if new_src_shape != new_src_data.shape: + new_src_data = new_src_data.reshape(new_src_shape) + logger.debug( + f"reshape src {self._src_cube_position} cube to new shape {new_src_shape}" + ) + + # Create the new src cube. + new_src_cube = Cube(new_src_data) + new_src_cube.metadata = src_cube.metadata + + def add_coord(coord, dim_coord=False): + src_dims = src_cube.coord_dims(coord) + tgt_dims = [self.mapping[src_dim] for src_dim in src_dims] + if dim_coord: + new_src_cube.add_dim_coord(coord, tgt_dims) + else: + new_src_cube.add_aux_coord(coord, tgt_dims) + + # Add the dim coordinates to the new src cube. + for coord in src_cube.dim_coords: + add_coord(coord, dim_coord=True) + + # Add the aux and scalar coordinates to the new src cube. + for coord in src_cube.aux_coords: + add_coord(coord) + + # Add the aux factories to the new src cube. + for factory in src_cube.aux_factories: + new_src_cube.add_aux_factory(factory) + + # Set the resolved cubes. + self._src_cube_resolved = new_src_cube + self._tgt_cube_resolved = tgt_cube + + @staticmethod + def _aux_coverage( + cube, + cube_items_aux, + cube_items_scalar, + common_aux_metadata, + common_scalar_metadata, + ): + common_items_aux = [] + common_items_scalar = [] + local_items_aux = [] + local_items_scalar = [] + dims_common = [] + dims_local = [] + dims_free = set(range(cube.ndim)) + + for item in cube_items_aux: + [dims_free.discard(dim) for dim in item.dims] + + if item.metadata in common_aux_metadata: + common_items_aux.append(item) + dims_common.extend(item.dims) + else: + local_items_aux.append(item) + dims_local.extend(item.dims) + + for item in cube_items_scalar: + if item.metadata in common_scalar_metadata: + common_items_scalar.append(item) + else: + local_items_scalar.append(item) + + return _AuxCoverage( + cube=cube, + common_items_aux=common_items_aux, + common_items_scalar=common_items_scalar, + local_items_aux=local_items_aux, + local_items_scalar=local_items_scalar, + dims_common=sorted(set(dims_common)), + dims_local=sorted(set(dims_local)), + dims_free=sorted(dims_free), + ) + + def _aux_mapping(self, src_coverage, tgt_coverage): + for tgt_item in tgt_coverage.common_items_aux: + # Search for a src aux metadata match. + tgt_metadata = tgt_item.metadata + src_items = tuple( + filter( + lambda src_item: src_item.metadata == tgt_metadata, + src_coverage.common_items_aux, + ) + ) + if src_items: + # Multiple matching src metadata must cover the same src + # dimensions. + src_dims = src_items[0].dims + if all(map(lambda item: item.dims == src_dims, src_items)): + # Ensure src and tgt have equal rank. + tgt_dims = tgt_item.dims + if len(src_dims) == len(tgt_dims): + for src_dim, tgt_dim in zip(src_dims, tgt_dims): + self.mapping[src_dim] = tgt_dim + logger.debug(f"{src_dim}->{tgt_dim}") + else: + # This situation can only occur due to a systemic internal + # failure to correctly identify common aux coordinate metadata + # coverage between the cubes. + emsg = ( + "Failed to map common aux coordinate metadata from " + "source cube {!r} to target cube {!r}, using {!r} on " + "target cube dimension{} {}." + ) + raise ValueError( + emsg.format( + src_coverage.cube.name(), + tgt_coverage.cube.name(), + tgt_metadata, + "s" if len(tgt_item.dims) > 1 else "", + tgt_item.dims, + ) + ) + + @staticmethod + def _categorise_items(cube): + category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) + + # Categorise the dim coordinates of the cube. 
+ for coord in cube.dim_coords: + item = _Item( + metadata=coord.metadata, + coord=coord, + dims=cube.coord_dims(coord), + ) + category.items_dim.append(item) + + # Categorise the aux and scalar coordinates of the cube. + for coord in cube.aux_coords: + dims = cube.coord_dims(coord) + item = _Item(metadata=coord.metadata, coord=coord, dims=dims) + if dims: + category.items_aux.append(item) + else: + category.items_scalar.append(item) + + return category + + @staticmethod + def _create_prepared_item(coord, dims, src=None, tgt=None): + if src is not None and tgt is not None: + combined = src.combine(tgt) + else: + combined = src or tgt + if not isinstance(dims, Iterable): + dims = (dims,) + prepared_metadata = _PreparedMetadata( + combined=combined, src=src, tgt=tgt + ) + bounds = coord.bounds + result = _PreparedItem( + metadata=prepared_metadata, + points=coord.points.copy(), + bounds=bounds if bounds is None else bounds.copy(), + dims=dims, + container=type(coord), + ) + return result + + @property + def _debug(self): + result = False + level = logger.getEffectiveLevel() + if level != logging.NOTSET: + result = logging.DEBUG >= level + return result + + @staticmethod + def _debug_items(items, title=None): + def _show(items, heading): + logger.debug(f"{title}{heading}:") + for item in items: + dmsg = f"metadata={item.metadata}, dims={item.dims}, bounds={item.coord.has_bounds()}" + logger.debug(dmsg) + + title = f"{title} " if title else "" + _show(items.items_dim, "dim") + _show(items.items_aux, "aux") + _show(items.items_scalar, "scalar") + + @staticmethod + def _dim_coverage(cube, cube_items_dim, common_dim_metadata): + ndim = cube.ndim + metadata = [None] * ndim + coords = [None] * ndim + dims_common = [] + dims_local = [] + dims_free = set(range(ndim)) + + for item in cube_items_dim: + (dim,) = item.dims + dims_free.discard(dim) + metadata[dim] = item.metadata + coords[dim] = item.coord + if item.metadata in common_dim_metadata: + dims_common.append(dim) + else: + dims_local.append(dim) + + return _DimCoverage( + cube=cube, + metadata=metadata, + coords=coords, + dims_common=sorted(dims_common), + dims_local=sorted(dims_local), + dims_free=sorted(dims_free), + ) + + def _dim_mapping(self, src_coverage, tgt_coverage): + for tgt_dim in tgt_coverage.dims_common: + # Search for a src dim metadata match. + tgt_metadata = tgt_coverage.metadata[tgt_dim] + try: + src_dim = src_coverage.metadata.index(tgt_metadata) + self.mapping[src_dim] = tgt_dim + logger.debug(f"{src_dim}->{tgt_dim}") + except ValueError: + # This exception can only occur due to a systemic internal + # failure to correctly identify common dim coordinate metadata + # coverage between the cubes. + emsg = ( + "Failed to map common dim coordinate metadata from " + "source cube {!r} to target cube {!r}, using {!r} on " + "target cube dimension {}." + ) + raise ValueError( + emsg.format( + src_coverage.cube.name(), + tgt_coverage.cube.name(), + tgt_metadata, + tuple([tgt_dim]), + ) + ) + + def _free_mapping( + self, + src_dim_coverage, + tgt_dim_coverage, + src_aux_coverage, + tgt_aux_coverage, + ): + src_cube = src_dim_coverage.cube + tgt_cube = tgt_dim_coverage.cube + src_ndim = src_cube.ndim + tgt_ndim = tgt_cube.ndim + + # mapping src to tgt, involving free dimensions on either the src/tgt. + free_mapping = {} + + # Determine the src/tgt dimensions that are not mapped, + # and not covered by any metadata. 
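Since `_debug` above simply compares `logging.DEBUG` against the module logger's effective level, the diagnostics can be switched on externally. The logger name follows from `logging.getLogger(__name__)`; this assumes the `logging.yaml` configuration added to `config.py` later in this diff does not disable the logger::

    import logging

    logging.getLogger("iris.common.resolve").setLevel(logging.DEBUG)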
+ src_free = set(src_dim_coverage.dims_free) & set( + src_aux_coverage.dims_free + ) + tgt_free = set(tgt_dim_coverage.dims_free) & set( + tgt_aux_coverage.dims_free + ) + + if src_free or tgt_free: + # Determine the src/tgt dimensions that are not mapped. + src_unmapped = set(range(src_ndim)) - set(self.mapping) + tgt_unmapped = set(range(tgt_ndim)) - set(self.mapping.values()) + + # Determine the src/tgt dimensions that are not mapped, + # but are covered by a src/tgt local coordinate. + src_unmapped_local = src_unmapped - src_free + tgt_unmapped_local = tgt_unmapped - tgt_free + + src_shape = src_cube.shape + tgt_shape = tgt_cube.shape + src_max, tgt_max = max(src_shape), max(tgt_shape) + + def assign_mapping(extent, unmapped_local_items, free_items=None): + result = None + if free_items is None: + free_items = [] + if extent == 1: + if unmapped_local_items: + result, _ = unmapped_local_items.pop(0) + elif free_items: + result, _ = free_items.pop(0) + else: + + def _filter(items): + return list( + filter(lambda item: item[1] == extent, items) + ) + + def _pop(item, items): + result, _ = item + index = items.index(item) + items.pop(index) + return result + + items = _filter(unmapped_local_items) + if items: + result = _pop(items[0], unmapped_local_items) + else: + items = _filter(free_items) + if items: + result = _pop(items[0], free_items) + return result + + if src_free: + # Attempt to map src free dimensions to tgt unmapped local or free dimensions. + tgt_unmapped_local_items = [ + (dim, tgt_shape[dim]) for dim in tgt_unmapped_local + ] + tgt_free_items = [(dim, tgt_shape[dim]) for dim in tgt_free] + + for src_dim in sorted( + src_free, key=lambda dim: (src_max - src_shape[dim], dim) + ): + tgt_dim = assign_mapping( + src_shape[src_dim], + tgt_unmapped_local_items, + tgt_free_items, + ) + if tgt_dim is None: + # Failed to map the src free dimension + # to a suitable tgt local/free dimension. + dmsg = ( + f"failed to map src free dimension ({src_dim},) from " + f"{self._src_cube_position} cube {src_cube.name()!r} to " + f"{self._tgt_cube_position} cube {tgt_cube.name()!r}." + ) + logger.debug(dmsg) + break + free_mapping[src_dim] = tgt_dim + else: + # Attempt to map tgt free dimensions to src unmapped local dimensions. + src_unmapped_local_items = [ + (dim, src_shape[dim]) for dim in src_unmapped_local + ] + + for tgt_dim in sorted( + tgt_free, key=lambda dim: (tgt_max - tgt_shape[dim], dim) + ): + src_dim = assign_mapping( + tgt_shape[tgt_dim], src_unmapped_local_items + ) + if src_dim is not None: + free_mapping[src_dim] = tgt_dim + if not src_unmapped_local_items: + # There are no more src unmapped local dimensions. + break + + # Determine whether there are still unmapped src dimensions. + src_unmapped = ( + set(range(src_cube.ndim)) - set(self.mapping) - set(free_mapping) + ) + + if src_unmapped: + plural = "s" if len(src_unmapped) > 1 else "" + emsg = ( + "Insufficient matching coordinate metadata to resolve cubes, " + f"cannot map dimension{plural} {tuple(sorted(src_unmapped))} " + f"of the {self._src_cube_position} cube {src_cube.name()!r} " + f"to the {self._tgt_cube_position} cube {tgt_cube.name()!r}." + ) + raise ValueError(emsg) + + # Update the mapping. + self.mapping.update(free_mapping) + logger.debug(f"mapping free dimensions gives, mapping={self.mapping}") + + def _init(self, lhs, rhs): + from iris.cube import Cube + + emsg = ( + "{cls} requires {arg!r} argument to be a 'Cube', got {actual!r}." 
+ ) + clsname = self.__class__.__name__ + + if not isinstance(lhs, Cube): + raise TypeError( + emsg.format(cls=clsname, arg="LHS", actual=type(lhs)) + ) + + if not isinstance(rhs, Cube): + raise TypeError( + emsg.format(cls=clsname, arg="RHS", actual=type(rhs)) + ) + + # The LHS cube to be resolved into the resultant cube. + self.lhs_cube = lhs + # The RHS cube to be resolved into the resultant cube. + self.rhs_cube = rhs + + # The transposed/reshaped (if required) LHS cube, which + # can be broadcast with RHS cube. + self.lhs_cube_resolved = None + # The transposed/reshaped (if required) RHS cube, which + # can be broadcast with LHS cube. + self.rhs_cube_resolved = None + + # Categorised dim, aux and scalar coordinate items for LHS cube. + self.lhs_cube_category = None + # Categorised dim, aux and scalar coordinate items for RHS cube. + self.rhs_cube_category = None + + # Categorised dim, aux and scalar coordinate items local to LHS cube only. + self.lhs_cube_category_local = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # Categorised dim, aux and scalar coordinate items local to RHS cube only. + self.rhs_cube_category_local = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + # Categorised dim, aux and scalar coordinate items common to both + # LHS cube and RHS cube. + self.category_common = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + + # Analysis of dim coordinates spanning LHS cube. + self.lhs_cube_dim_coverage = None + # Analysis of aux and scalar coordinates spanning LHS cube. + self.lhs_cube_aux_coverage = None + # Analysis of dim coordinates spanning RHS cube. + self.rhs_cube_dim_coverage = None + # Analysis of aux and scalar coordinates spanning RHS cube. + self.rhs_cube_aux_coverage = None + + # Map common metadata from RHS cube to LHS cube if LHS-rank >= RHS-rank, + # otherwise map common metadata from LHS cube to RHS cube. + if self.lhs_cube.ndim >= self.rhs_cube.ndim: + self.map_rhs_to_lhs = True + else: + self.map_rhs_to_lhs = False + + # Mapping of the dimensions between common metadata for the cubes, + # where the direction of the mapping is governed by map_rhs_to_lhs. + self.mapping = None + + # Cache containing a list of dim, aux and scalar coordinates prepared + # and ready for creating and attaching to the resultant cube. + self.prepared_category = None + + # Cache containing a list of aux factories prepared and ready for + # creating and attaching to the resultant cube. + self.prepared_factories = None + + # The shape of the resultant resolved cube. + self._broadcast_shape = None + + def _metadata_coverage(self): + # Determine the common dim coordinate metadata coverage. + common_dim_metadata = [ + item.metadata for item in self.category_common.items_dim + ] + + self.lhs_cube_dim_coverage = self._dim_coverage( + self.lhs_cube, + self.lhs_cube_category.items_dim, + common_dim_metadata, + ) + self.rhs_cube_dim_coverage = self._dim_coverage( + self.rhs_cube, + self.rhs_cube_category.items_dim, + common_dim_metadata, + ) + + # Determine the common aux and scalar coordinate metadata coverage. 
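Given the `__call__`/`_init` plumbing above, end-to-end use of the class is two-step: resolve a pair of cubes, then realise a result from data of the broadcast shape. A minimal runnable sketch with coordinate-free cubes::

    import numpy as np
    from iris.common.resolve import Resolve
    from iris.cube import Cube

    cube_a = Cube(np.zeros((2, 3)))
    cube_b = Cube(np.ones((2, 3)))

    resolve = Resolve(cube_a, cube_b)   # TypeError unless both are Cubes
    result = resolve.cube(cube_a.data + cube_b.data)
    result.shape                        # -> (2, 3), i.e. resolve.shape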
+ common_aux_metadata = [ + item.metadata for item in self.category_common.items_aux + ] + common_scalar_metadata = [ + item.metadata for item in self.category_common.items_scalar + ] + + self.lhs_cube_aux_coverage = self._aux_coverage( + self.lhs_cube, + self.lhs_cube_category.items_aux, + self.lhs_cube_category.items_scalar, + common_aux_metadata, + common_scalar_metadata, + ) + self.rhs_cube_aux_coverage = self._aux_coverage( + self.rhs_cube, + self.rhs_cube_category.items_aux, + self.rhs_cube_category.items_scalar, + common_aux_metadata, + common_scalar_metadata, + ) + + def _metadata_mapping(self): + # Initialise the state. + self.mapping = {} + + # Map RHS cube to LHS cube, or smaller to larger cube rank. + if self.map_rhs_to_lhs: + src_cube = self.rhs_cube + src_dim_coverage = self.rhs_cube_dim_coverage + src_aux_coverage = self.rhs_cube_aux_coverage + tgt_cube = self.lhs_cube + tgt_dim_coverage = self.lhs_cube_dim_coverage + tgt_aux_coverage = self.lhs_cube_aux_coverage + else: + src_cube = self.lhs_cube + src_dim_coverage = self.lhs_cube_dim_coverage + src_aux_coverage = self.lhs_cube_aux_coverage + tgt_cube = self.rhs_cube + tgt_dim_coverage = self.rhs_cube_dim_coverage + tgt_aux_coverage = self.rhs_cube_aux_coverage + + # Use the dim coordinates to fully map the + # src cube dimensions to the tgt cube dimensions. + self._dim_mapping(src_dim_coverage, tgt_dim_coverage) + logger.debug( + f"mapping common dim coordinates gives, mapping={self.mapping}" + ) + + # If necessary, use the aux coordinates to fully map the + # src cube dimensions to the tgt cube dimensions. + if not self.mapped: + self._aux_mapping(src_aux_coverage, tgt_aux_coverage) + logger.debug( + f"mapping common aux coordinates, mapping={self.mapping}" + ) + + if not self.mapped: + # Attempt to complete the mapping using src/tgt free dimensions. + # Note that, this may not be possible and result in an exception. + self._free_mapping( + src_dim_coverage, + tgt_dim_coverage, + src_aux_coverage, + tgt_aux_coverage, + ) + + # Attempt to transpose/reshape the cubes into compatible broadcast shapes. + # Note that, this may not be possible and result in an exception. + self._as_compatible_cubes() + + # Given the resultant broadcast shape, determine whether the + # mapping requires to be reversed. + broadcast_flip = ( + src_cube.ndim == tgt_cube.ndim + and self._tgt_cube_resolved.shape != self.shape + and self._src_cube_resolved.shape == self.shape + ) + + # Given the number of free dimensions, determine whether the + # mapping requires to be reversed. + src_free = set(src_dim_coverage.dims_free) & set( + src_aux_coverage.dims_free + ) + tgt_free = set(tgt_dim_coverage.dims_free) & set( + tgt_aux_coverage.dims_free + ) + free_flip = len(tgt_free) > len(src_free) + + # Reverse the mapping direction. + if broadcast_flip or free_flip: + flip_mapping = { + tgt_dim: src_dim for src_dim, tgt_dim in self.mapping.items() + } + self.map_rhs_to_lhs = not self.map_rhs_to_lhs + dmsg = ( + f"reversing the mapping from {self.mapping} to {flip_mapping}, " + f"now map_rhs_to_lhs={self.map_rhs_to_lhs}" + ) + logger.debug(dmsg) + self.mapping = flip_mapping + # Now require to transpose/reshape the cubes into compatible + # broadcast cubes again, due to possible non-commutative behaviour + # after reversing the mapping direction. + self._as_compatible_cubes() + + def _metadata_prepare(self): + # Initialise the state. 
+ self.prepared_category = _CategoryItems( + items_dim=[], items_aux=[], items_scalar=[] + ) + self.prepared_factories = [] + + # Map RHS cube to LHS cube, or smaller to larger cube rank. + if self.map_rhs_to_lhs: + src_cube = self.rhs_cube + src_category_local = self.rhs_cube_category_local + src_dim_coverage = self.rhs_cube_dim_coverage + src_aux_coverage = self.rhs_cube_aux_coverage + tgt_cube = self.lhs_cube + tgt_category_local = self.lhs_cube_category_local + tgt_dim_coverage = self.lhs_cube_dim_coverage + tgt_aux_coverage = self.lhs_cube_aux_coverage + else: + src_cube = self.lhs_cube + src_category_local = self.lhs_cube_category_local + src_dim_coverage = self.lhs_cube_dim_coverage + src_aux_coverage = self.lhs_cube_aux_coverage + tgt_cube = self.rhs_cube + tgt_category_local = self.rhs_cube_category_local + tgt_dim_coverage = self.rhs_cube_dim_coverage + tgt_aux_coverage = self.rhs_cube_aux_coverage + + # Determine the resultant cube dim coordinate/s. + self._prepare_common_dim_payload(src_dim_coverage, tgt_dim_coverage) + + # Determine the resultant cube aux coordinate/s. + self._prepare_common_aux_payload( + src_aux_coverage.common_items_aux, # input + tgt_aux_coverage.common_items_aux, # input + self.prepared_category.items_aux, # output + ) + + # Determine the resultant cube scalar coordinate/s. + self._prepare_common_aux_payload( + src_aux_coverage.common_items_scalar, # input + tgt_aux_coverage.common_items_scalar, # input + self.prepared_category.items_scalar, # output + ignore_mismatch=True, + ) + + self._prepare_local_payload( + src_dim_coverage, + src_aux_coverage, + tgt_dim_coverage, + tgt_aux_coverage, + ) + + self._prepare_factory_payload( + tgt_cube, tgt_category_local, from_src=False + ) + self._prepare_factory_payload(src_cube, src_category_local) + + def _metadata_resolve(self): + """ + Categorise the coordinate metadata of the cubes into three distinct + groups; metadata from coordinates only available (local) on the LHS + cube, metadata from coordinates only available (local) on the RHS + cube, and metadata from coordinates common to both the LHS and RHS + cubes. + + This is only applicable to coordinates that are members of the + 'aux_coords' or 'dim_coords' of the participating cubes. + + """ + + # Determine the cube dim, aux and scalar coordinate items + # for each individual cube. + self.lhs_cube_category = self._categorise_items(self.lhs_cube) + self.rhs_cube_category = self._categorise_items(self.rhs_cube) + + def _categorise( + lhs_items, + rhs_items, + lhs_local_items, + rhs_local_items, + common_items, + ): + rhs_items_metadata = [item.metadata for item in rhs_items] + # Track common metadata here as a temporary convenience. + common_metadata = [] + + # Determine items local to the lhs, and shared items + # common to both lhs and rhs. + for item in lhs_items: + metadata = item.metadata + if metadata in rhs_items_metadata: + # The metadata is common between lhs and rhs. + if metadata not in common_metadata: + common_items.append(item) + common_metadata.append(metadata) + else: + # The metadata is local to the lhs. + lhs_local_items.append(item) + + # Determine items local to the rhs. + for item in rhs_items: + if item.metadata not in common_metadata: + rhs_local_items.append(item) + + # Determine local and common dim category items. 
+ _categorise( + self.lhs_cube_category.items_dim, # input + self.rhs_cube_category.items_dim, # input + self.lhs_cube_category_local.items_dim, # output + self.rhs_cube_category_local.items_dim, # output + self.category_common.items_dim, # output + ) + + # Determine local and common aux category items. + _categorise( + self.lhs_cube_category.items_aux, # input + self.rhs_cube_category.items_aux, # input + self.lhs_cube_category_local.items_aux, # output + self.rhs_cube_category_local.items_aux, # output + self.category_common.items_aux, # output + ) + + # Determine local and common scalar category items. + _categorise( + self.lhs_cube_category.items_scalar, # input + self.rhs_cube_category.items_scalar, # input + self.lhs_cube_category_local.items_scalar, # output + self.rhs_cube_category_local.items_scalar, # output + self.category_common.items_scalar, # output + ) + + # Sort the resultant categories by metadata name for consistency, + # in-place. + categories = ( + self.lhs_cube_category, + self.rhs_cube_category, + self.lhs_cube_category_local, + self.rhs_cube_category_local, + self.category_common, + ) + key_func = lambda item: item.metadata.name() + + for category in categories: + category.items_dim.sort(key=key_func) + category.items_aux.sort(key=key_func) + category.items_scalar.sort(key=key_func) + + def _prepare_common_aux_payload( + self, + src_common_items, + tgt_common_items, + prepared_items, + ignore_mismatch=None, + ): + from iris.coords import AuxCoord + + if ignore_mismatch is None: + # Configure ability to ignore coordinate points/bounds + # mismatches between common items. + ignore_mismatch = False + + for src_item in src_common_items: + src_metadata = src_item.metadata + tgt_items = tuple( + filter( + lambda tgt_item: tgt_item.metadata == src_metadata, + tgt_common_items, + ) + ) + if not tgt_items: + dmsg = ( + f"ignoring src {self._src_cube_position} cube aux coordinate " + f"{src_metadata}, does not match any common tgt " + f"{self._tgt_cube_position} cube aux coordinate metadata" + ) + logger.debug(dmsg) + elif len(tgt_items) > 1: + dmsg = ( + f"ignoring src {self._src_cube_position} cube aux coordinate " + f"{src_metadata}, matches multiple [{len(tgt_items)}] common " + f"tgt {self._tgt_cube_position} cube aux coordinate metadata" + ) + logger.debug(dmsg) + else: + (tgt_item,) = tgt_items + src_coord = src_item.coord + tgt_coord = tgt_item.coord + points, bounds = self._prepare_points_and_bounds( + src_coord, + tgt_coord, + src_item.dims, + tgt_item.dims, + ignore_mismatch=ignore_mismatch, + ) + if points is not None: + src_type = type(src_coord) + tgt_type = type(tgt_coord) + # Downcast to aux if there are mixed container types. + container = src_type if src_type is tgt_type else AuxCoord + prepared_metadata = _PreparedMetadata( + combined=src_metadata.combine(tgt_item.metadata), + src=src_metadata, + tgt=tgt_item.metadata, + ) + prepared_item = _PreparedItem( + metadata=prepared_metadata, + points=points.copy(), + bounds=bounds if bounds is None else bounds.copy(), + dims=tgt_item.dims, + container=container, + ) + prepared_items.append(prepared_item) + + def _prepare_common_dim_payload( + self, src_coverage, tgt_coverage, ignore_mismatch=None + ): + from iris.coords import DimCoord + + if ignore_mismatch is None: + # Configure ability to ignore coordinate points/bounds + # mismatches between common items. 
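The `combined` member of each `_PreparedMetadata` built above records the combination of the matched pair, which (in its default strict form) keeps equal members and drops differing members to `None`. A sketch, with illustrative field values::

    from iris.common import CoordMetadata

    left = CoordMetadata(
        standard_name="air_temperature", long_name=None, var_name="t",
        units="K", attributes={}, coord_system=None, climatological=False,
    )
    right = left._replace(var_name="temp")

    left.combine(right).var_name   # -> None, the members differ
    left.combine(right).units      # -> 'K', the members are equal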
+ ignore_mismatch = False + + for src_dim in src_coverage.dims_common: + src_metadata = src_coverage.metadata[src_dim] + src_coord = src_coverage.coords[src_dim] + + tgt_dim = self.mapping[src_dim] + tgt_metadata = tgt_coverage.metadata[tgt_dim] + tgt_coord = tgt_coverage.coords[tgt_dim] + + points, bounds = self._prepare_points_and_bounds( + src_coord, + tgt_coord, + src_dim, + tgt_dim, + ignore_mismatch=ignore_mismatch, + ) + + if points is not None: + prepared_metadata = _PreparedMetadata( + combined=src_metadata.combine(tgt_metadata), + src=src_metadata, + tgt=tgt_metadata, + ) + prepared_item = _PreparedItem( + metadata=prepared_metadata, + points=points.copy(), + bounds=bounds if bounds is None else bounds.copy(), + dims=(tgt_dim,), + container=DimCoord, + ) + self.prepared_category.items_dim.append(prepared_item) + + def _prepare_factory_payload(self, cube, category_local, from_src=True): + def _get_prepared_item(metadata, from_src=True, from_local=False): + result = None + if from_local: + category = category_local + match = lambda item: item.metadata == metadata + else: + category = self.prepared_category + if from_src: + match = lambda item: item.metadata.src == metadata + else: + match = lambda item: item.metadata.tgt == metadata + for member in category._fields: + category_items = getattr(category, member) + matched_items = tuple(filter(match, category_items)) + if matched_items: + if len(matched_items) > 1: + dmsg = ( + f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} " + f"{'local' if from_local else 'prepared'} metadata matches" + ) + logger.debug(dmsg) + else: + (item,) = matched_items + if from_local: + src = tgt = None + if from_src: + src = item.metadata + dims = tuple( + [self.mapping[dim] for dim in item.dims] + ) + else: + tgt = item.metadata + dims = item.dims + result = self._create_prepared_item( + item.coord, dims, src=src, tgt=tgt + ) + getattr(self.prepared_category, member).append( + result + ) + else: + result = item + break + return result + + for factory in cube.aux_factories: + container = type(factory) + dependencies = {} + prepared_item = None + + if tuple( + filter( + lambda item: item.container is container, + self.prepared_factories, + ) + ): + # debug: skipping, factory already exists + dmsg = ( + f"ignoring {'src' if from_src else 'tgt'} {container}, " + f"a similar factory has already been prepared" + ) + logger.debug(dmsg) + continue + + for ( + dependency_name, + dependency_coord, + ) in factory.dependencies.items(): + metadata = dependency_coord.metadata + prepared_item = _get_prepared_item(metadata, from_src=from_src) + if prepared_item is None: + prepared_item = _get_prepared_item( + metadata, from_src=from_src, from_local=True + ) + if prepared_item is None: + dmsg = f"cannot find matching {metadata} for {container} dependency {dependency_name}" + logger.debug(dmsg) + break + dependencies[dependency_name] = prepared_item.metadata + + if prepared_item is not None: + prepared_factory = _PreparedFactory( + container=container, dependencies=dependencies + ) + self.prepared_factories.append(prepared_factory) + else: + dmsg = f"ignoring {'src' if from_src else 'tgt'} {container}, cannot find all dependencies" + logger.debug(dmsg) + + def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): + # Determine whether there are tgt dimensions not mapped to by an + # associated src dimension, and thus may be covered by any local + # tgt aux coordinates. 
+ extra_tgt_dims = set(range(tgt_aux_coverage.cube.ndim)) - set( + self.mapping.values() + ) + + if LENIENT["maths"]: + mapped_src_dims = set(self.mapping.keys()) + mapped_tgt_dims = set(self.mapping.values()) + + # Add local src aux coordinates. + for item in src_aux_coverage.local_items_aux: + if all([dim in mapped_src_dims for dim in item.dims]): + tgt_dims = tuple([self.mapping[dim] for dim in item.dims]) + prepared_item = self._create_prepared_item( + item.coord, tgt_dims, src=item.metadata + ) + self.prepared_category.items_aux.append(prepared_item) + else: + dmsg = ( + f"ignoring local src {self._src_cube_position} cube " + f"aux coordinate {item.metadata}, as not all src " + f"dimensions {item.dims} are mapped" + ) + logger.debug(dmsg) + else: + # For strict maths, only local tgt aux coordinates covering + # the extra dimensions of the tgt cube may be added. + mapped_tgt_dims = set() + + # Add local tgt aux coordinates. + for item in tgt_aux_coverage.local_items_aux: + tgt_dims = item.dims + if all([dim in mapped_tgt_dims for dim in tgt_dims]) or any( + [dim in extra_tgt_dims for dim in tgt_dims] + ): + prepared_item = self._create_prepared_item( + item.coord, tgt_dims, tgt=item.metadata + ) + self.prepared_category.items_aux.append(prepared_item) + else: + dmsg = ( + f"ignoring local tgt {self._tgt_cube_position} cube " + f"aux coordinate {item.metadata}, as not all tgt " + f"dimensions {tgt_dims} are mapped" + ) + logger.debug(dmsg) + + def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): + mapped_tgt_dims = self.mapping.values() + + # Determine whether there are tgt dimensions not mapped to by an + # associated src dimension, and thus may be covered by any local + # tgt dim coordinates. + extra_tgt_dims = set(range(tgt_dim_coverage.cube.ndim)) - set( + mapped_tgt_dims + ) + + if LENIENT["maths"]: + tgt_dims_conflict = set() + + # Add local src dim coordinates. + for src_dim in src_dim_coverage.dims_local: + tgt_dim = self.mapping[src_dim] + # Only add the local src dim coordinate iff there is no + # associated local tgt dim coordinate. + if tgt_dim not in tgt_dim_coverage.dims_local: + metadata = src_dim_coverage.metadata[src_dim] + coord = src_dim_coverage.coords[src_dim] + prepared_item = self._create_prepared_item( + coord, tgt_dim, src=metadata + ) + self.prepared_category.items_dim.append(prepared_item) + else: + tgt_dims_conflict.add(tgt_dim) + if self._debug: + src_metadata = src_dim_coverage.metadata[src_dim] + tgt_metadata = tgt_dim_coverage.metadata[tgt_dim] + dmsg = ( + f"ignoring local src {self._src_cube_position} cube " + f"dim coordinate {src_metadata}, as conflicts with " + f"tgt {self._tgt_cube_position} cube dim coordinate " + f"{tgt_metadata}, mapping ({src_dim},)->({tgt_dim},)" + ) + logger.debug(dmsg) + + # Determine whether there are any tgt dims free to be mapped + # by an available local tgt dim coordinate. + tgt_dims_unmapped = ( + set(tgt_dim_coverage.dims_local) - tgt_dims_conflict + ) + else: + # For strict maths, only local tgt dim coordinates covering + # the extra dimensions of the tgt cube may be added. + tgt_dims_unmapped = extra_tgt_dims + + # Add local tgt dim coordinates. 
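All of the strict/lenient forks in these payload methods consult the same runtime flag. Only the mapping-style read appears in this diff; the context manager below is an assumption carried over from the lenient service introduced elsewhere in this PR series::

    from iris.common import LENIENT

    LENIENT["maths"]                  # the flag consulted above

    # Assumption: the Lenient service exposes a context manager.
    with LENIENT.context(maths=False):
        pass  # strict cube-maths behaviour within this block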
+ for tgt_dim in tgt_dims_unmapped: + if tgt_dim in mapped_tgt_dims or tgt_dim in extra_tgt_dims: + metadata = tgt_dim_coverage.metadata[tgt_dim] + if metadata is not None: + coord = tgt_dim_coverage.coords[tgt_dim] + prepared_item = self._create_prepared_item( + coord, tgt_dim, tgt=metadata + ) + self.prepared_category.items_dim.append(prepared_item) + + def _prepare_local_payload_scalar( + self, src_aux_coverage, tgt_aux_coverage + ): + # Add all local tgt scalar coordinates iff the src cube is a + # scalar cube with no local src scalar coordinates. + # Only for strict maths. + src_scalar_cube = ( + not LENIENT["maths"] + and src_aux_coverage.cube.ndim == 0 + and len(src_aux_coverage.local_items_scalar) == 0 + ) + + if src_scalar_cube or LENIENT["maths"]: + # Add any local src scalar coordinates, if available. + for item in src_aux_coverage.local_items_scalar: + prepared_item = self._create_prepared_item( + item.coord, item.dims, src=item.metadata + ) + self.prepared_category.items_scalar.append(prepared_item) + + # Add any local tgt scalar coordinates, if available. + for item in tgt_aux_coverage.local_items_scalar: + prepared_item = self._create_prepared_item( + item.coord, item.dims, tgt=item.metadata + ) + self.prepared_category.items_scalar.append(prepared_item) + + def _prepare_local_payload( + self, + src_dim_coverage, + src_aux_coverage, + tgt_dim_coverage, + tgt_aux_coverage, + ): + # Add local src/tgt dim coordinates. + self._prepare_local_payload_dim(src_dim_coverage, tgt_dim_coverage) + + # Add local src/tgt aux coordinates. + self._prepare_local_payload_aux(src_aux_coverage, tgt_aux_coverage) + + # Add local src/tgt scalar coordinates. + self._prepare_local_payload_scalar(src_aux_coverage, tgt_aux_coverage) + + def _prepare_points_and_bounds( + self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None + ): + from iris.util import array_equal + + if ignore_mismatch is None: + # Configure ability to ignore coordinate points/bounds + # mismatches between common items. + ignore_mismatch = False + + points, bounds = None, None + + if not isinstance(src_dims, Iterable): + src_dims = (src_dims,) + + if not isinstance(tgt_dims, Iterable): + tgt_dims = (tgt_dims,) + + # Deal with coordinates that have been sliced. + if src_coord.ndim != tgt_coord.ndim: + if tgt_coord.ndim > src_coord.ndim: + # Use the tgt coordinate points/bounds. + points = tgt_coord.points + bounds = tgt_coord.bounds + else: + # Use the src coordinate points/bounds. + points = src_coord.points + bounds = src_coord.bounds + + # Deal with coordinates spanning broadcast dimensions. + if ( + points is None + and bounds is None + and src_coord.shape != tgt_coord.shape + ): + # Check whether the src coordinate is broadcasting. + dims = tuple([self.mapping[dim] for dim in src_dims]) + src_shape_broadcast = tuple([self.shape[dim] for dim in dims]) + src_cube_shape = self._src_cube.shape + src_shape = tuple([src_cube_shape[dim] for dim in src_dims]) + src_broadcasting = src_shape != src_shape_broadcast + + # Check whether the tgt coordinate is broadcasting. 
+ tgt_shape_broadcast = tuple([self.shape[dim] for dim in tgt_dims]) + tgt_cube_shape = self._tgt_cube.shape + tgt_shape = tuple([tgt_cube_shape[dim] for dim in tgt_dims]) + tgt_broadcasting = tgt_shape != tgt_shape_broadcast + + if src_broadcasting and tgt_broadcasting: + emsg = ( + f"Cannot broadcast the coordinate {src_coord.name()!r} on " + f"{self._src_cube_position} cube {self._src_cube.name()!r} and " + f"coordinate {tgt_coord.name()!r} on " + f"{self._tgt_cube_position} cube {self._tgt_cube.name()!r} to " + f"broadcast shape {tgt_shape_broadcast}." + ) + raise ValueError(emsg) + elif src_broadcasting: + # Use the tgt coordinate points/bounds. + points = tgt_coord.points + bounds = tgt_coord.bounds + elif tgt_broadcasting: + # Use the src coordinate points/bounds. + points = src_coord.points + bounds = src_coord.bounds + + if points is None and bounds is None: + # Note that, this also ensures shape equality. + eq_points = array_equal( + src_coord.points, tgt_coord.points, withnans=True + ) + if eq_points: + points = src_coord.points + src_has_bounds = src_coord.has_bounds() + tgt_has_bounds = tgt_coord.has_bounds() + + if src_has_bounds and tgt_has_bounds: + src_bounds = src_coord.bounds + eq_bounds = array_equal( + src_bounds, tgt_coord.bounds, withnans=True + ) + + if eq_bounds: + bounds = src_bounds + else: + if LENIENT["maths"] and ignore_mismatch: + # For lenient, ignore coordinate with mis-matched bounds. + dmsg = ( + f"ignoring src {self._src_cube_position} cube " + f"{src_coord.metadata}, unequal bounds with " + f"tgt {self._tgt_cube_position} cube, " + f"{src_dims}->{tgt_dims}" + ) + logger.debug(dmsg) + else: + emsg = ( + f"Coordinate {src_coord.name()!r} has different bounds for the " + f"LHS cube {self.lhs_cube.name()!r} and " + f"RHS cube {self.rhs_cube.name()!r}." + ) + raise ValueError(emsg) + else: + # For lenient, use either of the coordinate bounds, if they exist. + if LENIENT["maths"]: + if src_has_bounds: + dmsg = ( + f"using src {self._src_cube_position} cube " + f"{src_coord.metadata} bounds, tgt has no bounds" + ) + logger.debug(dmsg) + bounds = src_coord.bounds + else: + dmsg = ( + f"using tgt {self._tgt_cube_position} cube " + f"{tgt_coord.metadata} bounds, src has no bounds" + ) + logger.debug(dmsg) + bounds = tgt_coord.bounds + else: + # For strict, both coordinates must have bounds, or both + # coordinates must not have bounds. + if src_has_bounds: + emsg = ( + f"Coordinate {src_coord.name()!r} has bounds for the " + f"{self._src_cube_position} cube {self._src_cube.name()!r}, " + f"but not the {self._tgt_cube_position} cube {self._tgt_cube.name()!r}." + ) + raise ValueError(emsg) + if tgt_has_bounds: + emsg = ( + f"Coordinate {tgt_coord.name()!r} has bounds for the " + f"{self._tgt_cube_position} cube {self._tgt_cube.name()!r}, " + f"but not the {self._src_cube_position} cube {self._src_cube.name()!r}." + ) + raise ValueError(emsg) + else: + if LENIENT["maths"] and ignore_mismatch: + # For lenient, ignore coordinate with mis-matched points. + dmsg = ( + f"ignoring src {self._src_cube_position} cube " + f"{src_coord.metadata}, unequal points with tgt " + f"{src_dims}->{tgt_dims}" + ) + logger.debug(dmsg) + else: + emsg = ( + f"Coordinate {src_coord.name()!r} has different points for the " + f"LHS cube {self.lhs_cube.name()!r} and " + f"RHS cube {self.rhs_cube.name()!r}." 
+ ) + raise ValueError(emsg) + + return points, bounds + + @property + def _src_cube(self): + if self.map_rhs_to_lhs: + result = self.rhs_cube + else: + result = self.lhs_cube + return result + + @property + def _src_cube_position(self): + if self.map_rhs_to_lhs: + result = "RHS" + else: + result = "LHS" + return result + + @property + def _src_cube_resolved(self): + if self.map_rhs_to_lhs: + result = self.rhs_cube_resolved + else: + result = self.lhs_cube_resolved + return result + + @_src_cube_resolved.setter + def _src_cube_resolved(self, cube): + if self.map_rhs_to_lhs: + self.rhs_cube_resolved = cube + else: + self.lhs_cube_resolved = cube + + @property + def _tgt_cube(self): + if self.map_rhs_to_lhs: + result = self.lhs_cube + else: + result = self.rhs_cube + return result + + @property + def _tgt_cube_position(self): + if self.map_rhs_to_lhs: + result = "LHS" + else: + result = "RHS" + return result + + @property + def _tgt_cube_resolved(self): + if self.map_rhs_to_lhs: + result = self.lhs_cube_resolved + else: + result = self.rhs_cube_resolved + return result + + @_tgt_cube_resolved.setter + def _tgt_cube_resolved(self, cube): + if self.map_rhs_to_lhs: + self.lhs_cube_resolved = cube + else: + self.rhs_cube_resolved = cube + + def _tgt_cube_prepare(self, data): + cube = self._tgt_cube + + # Replace existing tgt cube data with the provided data. + cube.data = data + + # Clear the aux factories. + for factory in cube.aux_factories: + cube.remove_aux_factory(factory) + + # Clear the cube coordinates. + for coord in cube.coords(): + cube.remove_coord(coord) + + # Clear the cube cell measures. + for cm in cube.cell_measures(): + cube.remove_cell_measure(cm) + + # Clear the ancillary variables. + for av in cube.ancillary_variables(): + cube.remove_ancillary_variable(av) + + def cube(self, data, in_place=False): + from iris.cube import Cube + + expected_shape = self.shape + + # Ensure that we have been provided with candidate cubes, which are + # now resolved and metadata is prepared, ready and awaiting the + # resultant resolved cube. + if expected_shape is None: + emsg = ( + "Cannot resolve resultant cube, as no candidate cubes have " + "been provided." + ) + raise ValueError(emsg) + + if not hasattr(data, "shape"): + data = np.asanyarray(data) + + # Ensure that the shape of the provided data is the expected + # shape of the resultant resolved cube. + if data.shape != expected_shape: + emsg = ( + "Cannot resolve resultant cube, as the provided data must " + f"have shape {expected_shape}, got data shape {data.shape}." + ) + raise ValueError(emsg) + + if in_place: + result = self._tgt_cube + + if result.shape != expected_shape: + emsg = ( + "Cannot resolve resultant cube in-place, as the " + f"{self._tgt_cube_position} tgt cube {result.name()!r} " + f"requires data with shape {result.shape}, got data " + f"shape {data.shape}. Suggest not performing this " + "operation in-place." + ) + raise ValueError(emsg) + + # Prepare target cube for in-place population with the prepared + # metadata content and the provided data. + self._tgt_cube_prepare(data) + else: + # Create the resultant resolved cube with provided data. + result = Cube(data) + + # Add the combined cube metadata from both the candidate cubes. + result.metadata = self.lhs_cube.metadata.combine( + self.rhs_cube.metadata + ) + + # Add the prepared dim coordinates. 
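To make the points/bounds rules above concrete: with equal points, mismatched bounds are fatal under strict maths but tolerated, or adopted from whichever side has them, under lenient maths. A sketch using the stock `guess_bounds` helper::

    import numpy as np
    from iris.coords import DimCoord

    src = DimCoord(np.arange(3.0), long_name="x")
    tgt = src.copy()
    tgt.guess_bounds()  # tgt now has bounds, src does not

    # strict maths: ValueError, bounds on one side only;
    # lenient maths: the tgt bounds are adopted for the prepared coord.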
+ for item in self.prepared_category.items_dim: + coord = item.container(item.points, bounds=item.bounds) + coord.metadata = item.metadata.combined + result.add_dim_coord(coord, item.dims) + + # Add the prepared aux and scalar coordinates. + prepared_aux_coords = ( + self.prepared_category.items_aux + + self.prepared_category.items_scalar + ) + for item in prepared_aux_coords: + coord = item.container(item.points, bounds=item.bounds) + coord.metadata = item.metadata.combined + try: + result.add_aux_coord(coord, item.dims) + except ValueError as err: + scalar = dims = "" + if item.dims: + plural = "s" if len(item.dims) > 1 else "" + dims = f" with tgt dim{plural} {item.dims}" + else: + scalar = "scalar " + dmsg = ( + f"ignoring prepared {scalar}coordinate " + f"{coord.metadata}{dims}, got {err!r}" + ) + logger.debug(dmsg) + + # Add the prepared aux factories. + for prepared_factory in self.prepared_factories: + dependencies = dict() + for ( + dependency_name, + prepared_metadata, + ) in prepared_factory.dependencies.items(): + coord = result.coord(prepared_metadata.combined) + dependencies[dependency_name] = coord + factory = prepared_factory.container(**dependencies) + result.add_aux_factory(factory) + + return result + + @property + def mapped(self): + """ + Returns the state of whether all src cube dimensions have been + associated with relevant tgt cube dimensions. + + """ + return self._src_cube.ndim == len(self.mapping) + + @property + def shape(self): + """Returns the shape of the resultant resolved cube.""" + return getattr(self, "_broadcast_shape", None) diff --git a/lib/iris/config.py b/lib/iris/config.py index e1d7dee29d..eeef1873f9 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -32,8 +32,11 @@ import configparser import contextlib +import logging.config import os.path +import pathlib import warnings +import yaml # Returns simple string options @@ -81,6 +84,14 @@ def get_dir_option(section, option, default=None): config = configparser.ConfigParser() config.read([os.path.join(CONFIG_PATH, "site.cfg")]) +# Configure logging. +fname_logging = pathlib.Path(CONFIG_PATH) / "logging.yaml" +if not fname_logging.exists(): + emsg = f"Logging configuration file '{fname_logging!s}' does not exist." + raise FileNotFoundError(emsg) +with open(fname_logging) as fi: + logging.config.dictConfig(yaml.safe_load(fi)) +del fname_logging ################## # Resource options diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index cc41b27b34..812dfae23e 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -126,12 +126,13 @@ def __init__( radius. If just two of semi_major_axis, semi_minor_axis, and - inverse_flattening are given the missing element is calulated from the + inverse_flattening are given the missing element is calculated from the formula: :math:`flattening = (major - minor) / major` Currently, Iris will not allow over-specification (all three ellipsoid - paramaters). + parameters). 
+ Examples:: cs = GeogCS(6371229) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index b5392579c8..d2dcd35c92 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -25,13 +25,19 @@ from iris._data_manager import DataManager import iris._lazy_data as _lazy import iris.aux_factory +from iris.common import ( + AncillaryVariableMetadata, + BaseMetadata, + CFVariableMixin, + CellMeasureMetadata, + CoordMetadata, + DimCoordMetadata, + metadata_manager_factory, +) import iris.exceptions import iris.time import iris.util -from iris._cube_coord_common import CFVariableMixin -from iris.util import points_step - class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta): """ @@ -59,7 +65,7 @@ def __init__( standard_name=None, long_name=None, var_name=None, - units="no-unit", + units=None, attributes=None, ): """ @@ -92,6 +98,10 @@ def __init__( # its __init__ or __copy__ methods. The only bounds-related behaviour # it provides is a 'has_bounds()' method, which always returns False. + # Configure the metadata manager. + if not hasattr(self, "_metadata_manager"): + self._metadata_manager = metadata_manager_factory(BaseMetadata) + #: CF standard name of the quantity that the metadata represents. self.standard_name = standard_name @@ -160,7 +170,7 @@ def copy(self, values=None): * values An array of values for the new dimensional metadata object. - This may be a different shape to the orginal values array being + This may be a different shape to the original values array being copied. """ @@ -340,9 +350,9 @@ def __eq__(self, other): # If the other object has a means of getting its definition, then do # the comparison, otherwise return a NotImplemented to let Python try # to resolve the operator elsewhere. - if hasattr(other, "_as_defn"): + if hasattr(other, "metadata"): # metadata comparison - eq = self._as_defn() == other._as_defn() + eq = self.metadata == other.metadata # data values comparison if eq and eq is not NotImplemented: eq = iris.util.array_equal( @@ -367,17 +377,6 @@ def __ne__(self, other): result = not result return result - def _as_defn(self): - defn = _DMDefn( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - ) - - return defn - # Must supply __hash__ as Python 3 does not enable it if __eq__ is defined. # NOTE: Violates "objects which compare equal must have the same hash". # We ought to remove this, as equality of two dimensional metadata can @@ -688,7 +687,7 @@ def __init__( standard_name=None, long_name=None, var_name=None, - units="no-unit", + units=None, attributes=None, ): """ @@ -714,6 +713,12 @@ def __init__( A dictionary containing other cf and user-defined attributes. """ + # Configure the metadata manager. + if not hasattr(self, "_metadata_manager"): + self._metadata_manager = metadata_manager_factory( + AncillaryVariableMetadata + ) + super().__init__( values=data, standard_name=standard_name, @@ -788,7 +793,7 @@ def __init__( standard_name=None, long_name=None, var_name=None, - units="1", + units=None, attributes=None, measure=None, ): @@ -821,6 +826,9 @@ def __init__( 'area' and 'volume'. The default is 'area'. """ + # Configure the metadata manager. 
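A consequence of the `units=None` defaults above: an unset unit now reads as `unknown` rather than `'1'` or `'no-unit'`, since the mixin's setter routes every value through `cf_units.as_unit`. A sketch, relying on cf_units mapping `None` to the unknown unit::

    from iris.coords import AuxCoord

    coord = AuxCoord([0.0, 1.0], long_name="index")
    coord.units   # -> Unit('unknown')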
+ self._metadata_manager = metadata_manager_factory(CellMeasureMetadata) + super().__init__( data=data, standard_name=standard_name, @@ -838,14 +846,14 @@ def __init__( @property def measure(self): - return self._measure + return self._metadata_manager.measure @measure.setter def measure(self, measure): if measure not in ["area", "volume"]: emsg = f"measure must be 'area' or 'volume', got {measure!r}" raise ValueError(emsg) - self._measure = measure + self._metadata_manager.measure = measure def __str__(self): result = repr(self) @@ -864,17 +872,6 @@ def __repr__(self): ) return result - def _as_defn(self): - defn = CellMeasureDefn( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.measure, - ) - return defn - def cube_dims(self, cube): """ Return the cube dimensions of this CellMeasure. @@ -895,160 +892,6 @@ def xml_element(self, doc): return element -class CoordDefn( - namedtuple( - "CoordDefn", - [ - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "coord_system", - "climatological", - ], - ) -): - """ - Criterion for identifying a specific type of :class:`DimCoord` or - :class:`AuxCoord` based on its metadata. - - """ - - __slots__ = () - - def name(self, default="unknown"): - """ - Returns a human-readable name. - - First it tries self.standard_name, then it tries the 'long_name' - attribute, then the 'var_name' attribute, before falling back to - the value of `default` (which itself defaults to 'unknown'). - - """ - return self.standard_name or self.long_name or self.var_name or default - - def __lt__(self, other): - if not isinstance(other, CoordDefn): - return NotImplemented - - def _sort_key(defn): - # Emulate Python 2 behaviour with None - return ( - defn.standard_name is not None, - defn.standard_name, - defn.long_name is not None, - defn.long_name, - defn.var_name is not None, - defn.var_name, - defn.units is not None, - defn.units, - defn.coord_system is not None, - defn.coord_system, - ) - - return _sort_key(self) < _sort_key(other) - - -class CellMeasureDefn( - namedtuple( - "CellMeasureDefn", - [ - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "measure", - ], - ) -): - """ - Criterion for identifying a specific type of :class:`CellMeasure` - based on its metadata. - - """ - - __slots__ = () - - def name(self, default="unknown"): - """ - Returns a human-readable name. - - First it tries self.standard_name, then it tries the 'long_name' - attribute, then the 'var_name' attribute, before falling back to - the value of `default` (which itself defaults to 'unknown'). - - """ - return self.standard_name or self.long_name or self.var_name or default - - def __lt__(self, other): - if not isinstance(other, CellMeasureDefn): - return NotImplemented - - def _sort_key(defn): - # Emulate Python 2 behaviour with None - return ( - defn.standard_name is not None, - defn.standard_name, - defn.long_name is not None, - defn.long_name, - defn.var_name is not None, - defn.var_name, - defn.units is not None, - defn.units, - defn.measure is not None, - defn.measure, - ) - - return _sort_key(self) < _sort_key(other) - - -class _DMDefn( - namedtuple( - "DMDefn", - ["standard_name", "long_name", "var_name", "units", "attributes",], - ) -): - """ - Criterion for identifying a specific type of :class:`_DimensionalMetadata` - based on its metadata. - - """ - - __slots__ = () - - def name(self, default="unknown"): - """ - Returns a human-readable name. 
- - First it tries self.standard_name, then it tries the 'long_name' - attribute, then the 'var_name' attribute, before falling back to - the value of `default` (which itself defaults to 'unknown'). - - """ - return self.standard_name or self.long_name or self.var_name or default - - def __lt__(self, other): - if not isinstance(other, _DMDefn): - return NotImplemented - - def _sort_key(defn): - # Emulate Python 2 behaviour with None - return ( - defn.standard_name is not None, - defn.standard_name, - defn.long_name is not None, - defn.long_name, - defn.var_name is not None, - defn.var_name, - defn.units is not None, - defn.units, - ) - - return _sort_key(self) < _sort_key(other) - - class CoordExtent( namedtuple( "_CoordExtent", @@ -1440,7 +1283,7 @@ def __init__( standard_name=None, long_name=None, var_name=None, - units="1", + units=None, bounds=None, attributes=None, coord_system=None, @@ -1490,7 +1333,12 @@ def __init__( Will set to True when a climatological time axis is loaded from NetCDF. Always False if no bounds exist. + """ + # Configure the metadata manager. + if not hasattr(self, "_metadata_manager"): + self._metadata_manager = metadata_manager_factory(CoordMetadata) + super().__init__( values=points, standard_name=standard_name, @@ -1589,7 +1437,7 @@ def bounds(self, bounds): # Ensure the bounds are a compatible shape. if bounds is None: self._bounds_dm = None - self._climatological = False + self.climatological = False else: bounds = self._sanitise_array(bounds, 2) if self.shape != bounds.shape[:-1]: @@ -1605,6 +1453,15 @@ def bounds(self, bounds): else: self._bounds_dm.data = bounds + @property + def coord_system(self): + """The coordinate-system of the coordinate.""" + return self._metadata_manager.coord_system + + @coord_system.setter + def coord_system(self, value): + self._metadata_manager.coord_system = value + @property def climatological(self): """ @@ -1615,8 +1472,13 @@ def climatological(self): Always reads as False if there are no bounds. On set, the input value is cast to a boolean, exceptions raised if units are not time units or if there are no bounds. + """ - return self._climatological if self.has_bounds() else False + if not self.has_bounds(): + self._metadata_manager.climatological = False + if not self.units.is_time_reference(): + self._metadata_manager.climatological = False + return self._metadata_manager.climatological @climatological.setter def climatological(self, value): @@ -1634,7 +1496,7 @@ def climatological(self, value): emsg = "Cannot set climatological coordinate, no bounds exist." raise ValueError(emsg) - self._climatological = value + self._metadata_manager.climatological = value def lazy_points(self): """ @@ -1722,18 +1584,6 @@ def _repr_other_metadata(self): result += ", climatological={}".format(self.climatological) return result - def _as_defn(self): - defn = CoordDefn( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.coord_system, - self.climatological, - ) - return defn - # Must supply __hash__ as Python 3 does not enable it if __eq__ is defined. # NOTE: Violates "objects which compare equal must have the same hash". # We ought to remove this, as equality of two coords can *change*, so they @@ -1986,8 +1836,9 @@ def is_compatible(self, other, ignore=None): Args: * other: - An instance of :class:`iris.coords.Coord` or - :class:`iris.coords.CoordDefn`. + An instance of :class:`iris.coords.Coord`, + :class:`iris.common.CoordMetadata` or + :class:`iris.common.DimCoordMetadata`. 
* ignore: A single attribute key or iterable of attribute keys to ignore when comparing the coordinates. Default is None. To ignore all @@ -2411,7 +2262,7 @@ def from_regular( standard_name=None, long_name=None, var_name=None, - units="1", + units=None, attributes=None, coord_system=None, circular=False, @@ -2442,7 +2293,7 @@ """ points = (zeroth + step) + step * np.arange(count, dtype=np.float32) - _, regular = points_step(points) + _, regular = iris.util.points_step(points) if not regular: points = (zeroth + step) + step * np.arange( count, dtype=np.float64 @@ -2474,7 +2325,7 @@ def __init__( standard_name=None, long_name=None, var_name=None, - units="1", + units=None, bounds=None, attributes=None, coord_system=None, @@ -2486,6 +2337,9 @@ read-only points and bounds. """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(DimCoordMetadata) + super().__init__( points, standard_name=standard_name, @@ -2499,7 +2353,7 @@ ) #: Whether the coordinate wraps by ``coord.units.modulus``. - self.circular = bool(circular) + self.circular = circular def __deepcopy__(self, memo): """ @@ -2515,6 +2369,14 @@ new_coord._bounds_dm.data.flags.writeable = False return new_coord + @property + def circular(self): + return self._metadata_manager.circular + + @circular.setter + def circular(self, circular): + self._metadata_manager.circular = bool(circular) + def copy(self, points=None, bounds=None): new_coord = super().copy(points=points, bounds=bounds) # Make the arrays read-only. @@ -2524,13 +2386,13 @@ return new_coord def __eq__(self, other): - # TODO investigate equality of AuxCoord and DimCoord if circular is - # False. result = NotImplemented if isinstance(other, DimCoord): - result = ( - Coord.__eq__(self, other) and self.circular == other.circular - ) + # The "circular" member participates in DimCoord to DimCoord + # equivalence. We need to do this explicitly here + # as the "circular" member does NOT participate in + # DimCoordMetadata to DimCoordMetadata equivalence. + result = self.circular == other.circular and super().__eq__(other) return result # The __ne__ operator from Coord implements the not __eq__ method.
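For illustration, a minimal sketch of the equality semantics described in the comment above, assuming the new iris.common metadata classes behave as this changeset documents; the coordinate values here are hypothetical::

    from iris.coords import DimCoord

    a = DimCoord(
        [0.0, 90.0, 180.0, 270.0],
        standard_name="longitude",
        units="degrees",
        circular=True,
    )
    b = a.copy()
    b.circular = False

    # "circular" does not participate in DimCoordMetadata equivalence ...
    assert a.metadata == b.metadata
    # ... but DimCoord.__eq__ compares it explicitly, so the coords differ.
    assert a != b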
@@ -2779,19 +2641,20 @@ def __init__(self, method, coords=None, intervals=None, comments=None): "'method' must be a string - got a '%s'" % type(method) ) - default_name = CFVariableMixin._DEFAULT_NAME + default_name = BaseMetadata.DEFAULT_NAME _coords = [] + if coords is None: pass elif isinstance(coords, Coord): _coords.append(coords.name(token=True)) elif isinstance(coords, str): - _coords.append(CFVariableMixin.token(coords) or default_name) + _coords.append(BaseMetadata.token(coords) or default_name) else: normalise = ( lambda coord: coord.name(token=True) if isinstance(coord, Coord) - else CFVariableMixin.token(coord) or default_name + else BaseMetadata.token(coord) or default_name ) _coords.extend([normalise(coord) for coord in coords]) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 1b1a4d7b9a..cc833f8848 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -9,7 +9,7 @@ """ -from collections import namedtuple, OrderedDict +from collections import OrderedDict from collections.abc import ( Iterable, Container, @@ -29,56 +29,29 @@ import numpy as np import numpy.ma as ma -from iris._cube_coord_common import CFVariableMixin import iris._concatenate import iris._constraints from iris._data_manager import DataManager import iris._lazy_data as _lazy - import iris._merge import iris.analysis from iris.analysis.cartography import wrap_lons import iris.analysis.maths import iris.aux_factory +from iris.common import ( + CFVariableMixin, + CoordMetadata, + CubeMetadata, + DimCoordMetadata, + metadata_manager_factory, +) import iris.coord_systems import iris.coords import iris.exceptions import iris.util -__all__ = ["Cube", "CubeList", "CubeMetadata"] - - -class CubeMetadata( - namedtuple( - "CubeMetadata", - [ - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "cell_methods", - ], - ) -): - """ - Represents the phenomenon metadata for a single :class:`Cube`. - - """ - - __slots__ = () - - def name(self, default="unknown"): - """ - Returns a human-readable name. - - First it tries self.standard_name, then it tries the 'long_name' - attribute, then the 'var_name' attribute, before falling back to - the value of `default` (which itself defaults to 'unknown'). - - """ - return self.standard_name or self.long_name or self.var_name or default +__all__ = ["Cube", "CubeList"] # The XML namespace to use for CubeML documents @@ -864,6 +837,9 @@ def __init__( if isinstance(data, str): raise TypeError("Invalid data type: {!r}.".format(data)) + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(CubeMetadata) + # Initialise the cube data manager. self._data_manager = DataManager(data) @@ -930,43 +906,15 @@ def __init__( self.add_ancillary_variable(ancillary_variable, dims) @property - def metadata(self): + def _names(self): """ - An instance of :class:`CubeMetadata` describing the phenomenon. - - This property can be updated with any of: - - another :class:`CubeMetadata` instance, - - a tuple/dict which can be used to make a :class:`CubeMetadata`, - - or any object providing the attributes exposed by - :class:`CubeMetadata`. + A tuple containing the value of each name participating in the identity + of a :class:`iris.cube.Cube`. This includes the standard name, + long name, NetCDF variable name, and the STASH from the attributes + dictionary. 
""" - return CubeMetadata( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.cell_methods, - ) - - @metadata.setter - def metadata(self, value): - try: - value = CubeMetadata(**value) - except TypeError: - try: - value = CubeMetadata(*value) - except TypeError: - missing_attrs = [ - field - for field in CubeMetadata._fields - if not hasattr(value, field) - ] - if missing_attrs: - raise TypeError("Invalid/incomplete metadata") - for name in CubeMetadata._fields: - setattr(self, name, getattr(value, name)) + return self._metadata_manager._names def is_compatible(self, other, ignore=None): """ @@ -1186,7 +1134,7 @@ def add_cell_measure(self, cell_measure, data_dims=None): data_dims = self._check_multi_dim_metadata(cell_measure, data_dims) self._cell_measures_and_dims.append((cell_measure, data_dims)) self._cell_measures_and_dims.sort( - key=lambda cm_dims: (cm_dims[0]._as_defn(), cm_dims[1]) + key=lambda cm_dims: (cm_dims[0].metadata, cm_dims[1]) ) def add_ancillary_variable(self, ancillary_variable, data_dims=None): @@ -1200,6 +1148,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): the cube Kwargs: + * data_dims Integer or iterable of integers giving the data dimensions spanned by the ancillary variable. @@ -1207,6 +1156,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): Raises a ValueError if an ancillary variable with identical metadata already exists on the cube. """ + if self.ancillary_variables(ancillary_variable): raise ValueError("Duplicate ancillary variables not permitted") @@ -1217,7 +1167,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): (ancillary_variable, data_dims) ) self._ancillary_variables_and_dims.sort( - key=lambda av_dims: (av_dims[0]._as_defn(), av_dims[1]) + key=lambda av_dims: (av_dims[0].metadata, av_dims[1]) ) def add_dim_coord(self, dim_coord, data_dim): @@ -1301,6 +1251,9 @@ def _remove_coord(self, coord): for coord_, dims in self._aux_coords_and_dims if coord_ is not coord ] + for aux_factory in self.aux_factories: + if coord.metadata == aux_factory.metadata: + self.remove_aux_factory(aux_factory) def remove_coord(self, coord): """ @@ -1333,7 +1286,7 @@ def remove_cell_measure(self, cell_measure): (a) a :attr:`standard_name`, :attr:`long_name`, or :attr:`var_name`. Defaults to value of `default` (which itself defaults to `unknown`) as defined in - :class:`iris._cube_coord_common.CFVariableMixin`. + :class:`iris.common.CFVariableMixin`. (b) a cell_measure instance with metadata equal to that of the desired cell_measures. @@ -1426,11 +1379,11 @@ def coord_dims(self, coord): ] # Search derived aux coords - target_defn = coord._as_defn() if not matches: + target_metadata = coord.metadata def match(factory): - return factory._as_defn() == target_defn + return factory.metadata == target_metadata factories = filter(match, self._aux_factories) matches = [ @@ -1586,13 +1539,14 @@ def coords( (a) a :attr:`standard_name`, :attr:`long_name`, or :attr:`var_name`. Defaults to value of `default` (which itself defaults to `unknown`) as defined in - :class:`iris._cube_coord_common.CFVariableMixin`. + :class:`iris.common.CFVariableMixin`. (b) a coordinate instance with metadata equal to that of the desired coordinates. Accepts either a :class:`iris.coords.DimCoord`, :class:`iris.coords.AuxCoord`, - :class:`iris.aux_factory.AuxCoordFactory` - or :class:`iris.coords.CoordDefn`. 
+ :class:`iris.aux_factory.AuxCoordFactory`, + :class:`iris.common.CoordMetadata` or + :class:`iris.common.DimCoordMetadata`. * standard_name The CF standard name of the desired coordinate. If None, does not check for standard name. @@ -1710,14 +1664,17 @@ def attr_filter(coord_): ] if coord is not None: - if isinstance(coord, iris.coords.CoordDefn): - defn = coord + if hasattr(coord, "__class__") and coord.__class__ in ( + CoordMetadata, + DimCoordMetadata, + ): + target_metadata = coord else: - defn = coord._as_defn() + target_metadata = coord.metadata coords_and_factories = [ coord_ for coord_ in coords_and_factories - if coord_._as_defn() == defn + if coord_.metadata == target_metadata ] if contains_dimension is not None: @@ -1883,7 +1840,7 @@ def cell_measures(self, name_or_cell_measure=None): (a) a :attr:`standard_name`, :attr:`long_name`, or :attr:`var_name`. Defaults to value of `default` (which itself defaults to `unknown`) as defined in - :class:`iris._cube_coord_common.CFVariableMixin`. + :class:`iris.common.CFVariableMixin`. (b) a cell_measure instance with metadata equal to that of the desired cell_measures. @@ -1966,7 +1923,7 @@ def ancillary_variables(self, name_or_ancillary_variable=None): (a) a :attr:`standard_name`, :attr:`long_name`, or :attr:`var_name`. Defaults to value of `default` (which itself defaults to `unknown`) as defined in - :class:`iris._cube_coord_common.CFVariableMixin`. + :class:`iris.common.CFVariableMixin`. (b) an ancillary_variable instance with metadata equal to that of the desired ancillary_variables. @@ -2047,11 +2004,13 @@ def cell_methods(self): done on the phenomenon. """ - return self._cell_methods + return self._metadata_manager.cell_methods @cell_methods.setter def cell_methods(self, cell_methods): - self._cell_methods = tuple(cell_methods) if cell_methods else tuple() + self._metadata_manager.cell_methods = ( + tuple(cell_methods) if cell_methods else tuple() + ) def core_data(self): """ @@ -2876,7 +2835,7 @@ def intersection(self, *args, **kwargs): For ranges defined over "circular" coordinates (i.e. those where the `units` attribute has a modulus defined) the cube - will be "rolled" to fit where neccesary. + will be "rolled" to fit where necessary. ..
warning:: @@ -4079,7 +4038,7 @@ def aggregated_by(self, coords, aggregator, **kwargs): ) coords = self._as_list_of_coords(coords) - for coord in sorted(coords, key=lambda coord: coord._as_defn()): + for coord in sorted(coords, key=lambda coord: coord.metadata): if coord.ndim > 1: msg = ( "Cannot aggregate_by coord %s as it is " @@ -4195,7 +4154,7 @@ def aggregated_by(self, coords, aggregator, **kwargs): for coord in groupby.coords: if ( dim_coord is not None - and dim_coord._as_defn() == coord._as_defn() + and dim_coord.metadata == coord.metadata and isinstance(coord, iris.coords.DimCoord) ): aggregateby_cube.add_dim_coord( diff --git a/lib/iris/etc/logging.yaml b/lib/iris/etc/logging.yaml new file mode 100644 index 0000000000..a73906e7db --- /dev/null +++ b/lib/iris/etc/logging.yaml @@ -0,0 +1,45 @@ +version: 1 + +formatters: + basic: + format: "%(asctime)s %(name)s %(levelname)s - %(message)s" + datefmt: "%d-%m-%Y %H:%M:%S" + basic-cls-func: + format: "%(asctime)s %(name)s %(levelname)s - %(message)s [%(cls)s.%(funcName)s]" + datefmt: "%d-%m-%Y %H:%M:%S" + basic-func: + format: "%(asctime)s %(name)s %(levelname)s - %(message)s [%(funcName)s]" + +handlers: + console: + class: logging.StreamHandler + formatter: basic + stream: ext://sys.stdout + console-cls-func: + class: logging.StreamHandler + formatter: basic-cls-func + stream: ext://sys.stdout + console-func: + class: logging.StreamHandler + formatter: basic-func + stream: ext://sys.stdout + +loggers: + iris.common.metadata: + level: INFO + handlers: [console-cls-func] + propagate: no + iris.common.resolve: + level: INFO + handlers: [console-func] + propagate: no + matplotlib: + level: INFO + PIL: + level: INFO + urllib3: + level: INFO + +root: + level: INFO + handlers: [console] diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index 2992360247..e357f2ca9d 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -68,8 +68,8 @@ def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): that are generally monotonic in the direction of interpolation, such as height/pressure or salinity/depth. - Parameters - ---------- + Args: + cube : :class:`~iris.cube.Cube` The phenomenon data to be re-levelled. diff --git a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb index 5ecfeb77b1..2afc823795 100644 --- a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb +++ b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb @@ -1173,6 +1173,7 @@ fc_extras import numpy.ma as ma import iris.aux_factory + from iris.common.mixin import _get_valid_standard_name import iris.coords import iris.coord_systems import iris.fileformats.cf as cf @@ -1182,7 +1183,6 @@ fc_extras import iris.exceptions import iris.std_names import iris.util - from iris._cube_coord_common import get_valid_standard_name from iris._lazy_data import as_lazy_data @@ -1195,6 +1195,8 @@ fc_extras UD_UNITS_LON = ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', 'degreee', 'degreese', 'degrees', 'degrees east', 'degree east', 'degree e', 'degrees e'] + UNKNOWN_UNIT_STRING = "?" 
+ NO_UNIT_STRING = "-" # # CF Dimensionless Vertical Coordinates @@ -1298,7 +1300,7 @@ if standard_name is not None: try: - cube.standard_name = get_valid_standard_name(standard_name) + cube.standard_name = _get_valid_standard_name(standard_name) except ValueError: if cube.long_name is not None: cube.attributes['invalid_standard_name'] = standard_name @@ -1651,9 +1653,9 @@ fc_extras ################################################################################ def get_attr_units(cf_var, attributes): - attr_units = getattr(cf_var, CF_ATTR_UNITS, cf_units._UNIT_DIMENSIONLESS) + attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) if not attr_units: - attr_units = '1' + attr_units = UNKNOWN_UNIT_STRING # Sanitise lat/lon units. if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON: @@ -1668,10 +1670,10 @@ fc_extras cf_var.cf_name, attr_units) warnings.warn(msg) attributes['invalid_units'] = attr_units - attr_units = cf_units._UNKNOWN_UNIT_STRING + attr_units = UNKNOWN_UNIT_STRING if np.issubdtype(cf_var.dtype, np.str_): - attr_units = cf_units._NO_UNIT_STRING + attr_units = NO_UNIT_STRING # Get any associated calendar for a time reference coordinate. if cf_units.as_unit(attr_units).is_time_reference(): @@ -1693,7 +1695,7 @@ fc_extras if standard_name is not None: try: - standard_name = get_valid_standard_name(standard_name) + standard_name = _get_valid_standard_name(standard_name) except ValueError: if long_name is not None: attributes['invalid_standard_name'] = standard_name diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 1db4e6c61e..75f328a80e 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -10,7 +10,7 @@ References: [CF] NetCDF Climate and Forecast (CF) Metadata conventions, Version 1.5, October, 2010.
-[NUG] NetCDF User's Guide, http://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html +[NUG] NetCDF User's Guide, https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/ """ diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 0d9d149664..0464eb37ed 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -882,7 +882,7 @@ def load_NAMEIII_timeseries(filename): for i, data_list in enumerate(data_lists): data_list.append(float(vals[i + 1])) - data_arrays = [np.array(l) for l in data_lists] + data_arrays = [np.array(dl) for dl in data_lists] time_array = np.array(time_list) tdim = NAMECoord(name="time", dimension=0, values=time_array) @@ -955,7 +955,7 @@ def load_NAMEII_timeseries(filename): for i, data_list in enumerate(data_lists): data_list.append(float(vals[i + 2])) - data_arrays = [np.array(l) for l in data_lists] + data_arrays = [np.array(dl) for dl in data_lists] time_array = np.array(time_list) tdim = NAMECoord(name="time", dimension=0, values=time_array) @@ -1111,7 +1111,7 @@ def load_NAMEIII_version2(filename): for i, data_list in enumerate(data_lists): data_list.append(float(vals[i + datacol1])) - data_arrays = [np.array(l) for l in data_lists] + data_arrays = [np.array(dl) for dl in data_lists] # Convert Z and T arrays into arrays of indices zind = [] diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 4d7ddedc61..f34dc45e72 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -22,6 +22,7 @@ import warnings import dask.array as da +import cf_units import netCDF4 import numpy as np import numpy.ma as ma @@ -959,7 +960,7 @@ def write( than global attributes. * unlimited_dimensions (iterable of strings and/or - :class:`iris.coords.Coord` objects): + :class:`iris.coords.Coord` objects): List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no @@ -992,10 +993,10 @@ def write( Used to manually specify the HDF5 chunksizes for each dimension of the variable. A detailed discussion of HDF chunking and I/O performance is available here: - http://www.hdfgroup.org/HDF5/doc/H5.user/Chunking.html. Basically, - you want the chunk size for each dimension to match as closely as - possible the size of the data block that users will read from the - file. `chunksizes` cannot be set if `contiguous=True`. + https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html. + Basically, you want the chunk size for each dimension to match + as closely as possible the size of the data block that users will + read from the file. `chunksizes` cannot be set if `contiguous=True`. * endian (string): Used to control whether the data is stored in little or big endian @@ -1760,7 +1761,7 @@ def _inner_create_cf_cellmeasure_or_ancil_variable( # Add the data to the CF-netCDF variable. cf_var[:] = data - if dimensional_metadata.units != "unknown": + if dimensional_metadata.units.is_udunits(): _setncattr(cf_var, "units", str(dimensional_metadata.units)) if dimensional_metadata.standard_name is not None: @@ -1926,7 +1927,7 @@ def _create_cf_coord_variable(self, cube, dimension_names, coord): # Deal with CF-netCDF units and standard name. 
standard_name, long_name, units = self._cf_coord_identity(coord) - if units != "unknown": + if cf_units.as_unit(units).is_udunits(): _setncattr(cf_var, "units", units) if standard_name is not None: @@ -2371,7 +2372,7 @@ def store(data, cf_var, fill_value): if cube.long_name: _setncattr(cf_var, "long_name", cube.long_name) - if cube.units != "unknown": + if cube.units.is_udunits(): _setncattr(cf_var, "units", str(cube.units)) # Add the CF-netCDF calendar attribute. @@ -2506,7 +2507,7 @@ def save( than global attributes. * unlimited_dimensions (iterable of strings and/or - :class:`iris.coords.Coord` objects): + :class:`iris.coords.Coord` objects): List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no unlimited dimensions are @@ -2538,7 +2539,7 @@ def save( * chunksizes (tuple of int): Used to manually specify the HDF5 chunksizes for each dimension of the variable. A detailed discussion of HDF chunking and I/O performance is - available here: http://www.hdfgroup.org/HDF5/doc/H5.user/Chunking.html. + available here: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html. Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index deb4ac862c..4cf8755bb9 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -233,9 +233,8 @@ def reference_time(cube, field): field.dt_hour, field.dt_minute, ) - ref_time_coord = DimCoord( - np.array(TIME_UNIT.date2num(data_date), dtype=np.int64), + np.array(np.round(TIME_UNIT.date2num(data_date)), dtype=np.int64), standard_name="forecast_reference_time", units=TIME_UNIT, ) @@ -301,7 +300,9 @@ def experiment(cube, field): """Add an 'experiment number' to the cube, if present in the field.""" if not is_missing(field, field.experiment_num): cube.add_aux_coord( - DimCoord(field.experiment_num, long_name="experiment_number") + DimCoord( + field.experiment_num, long_name="experiment_number", units="1" + ) ) @@ -412,8 +413,8 @@ def coord_system(field, handle_metadata_errors): ) if any([is_missing(field, v) for v in crs_args]): warnings.warn( - f"Coordinate Reference System is not completely defined. " - f"Plotting and reprojection may be impaired." + "Coordinate Reference System is not completely defined. " + "Plotting and reprojection may be impaired." 
) coord_sys = iris.coord_systems.TransverseMercator( *crs_args, iris.coord_systems.GeogCS(**ellipsoid), @@ -592,7 +593,9 @@ def ensemble_member(cube, field): if not is_missing(field, ensemble_member_value): cube.add_aux_coord( DimCoord( - np.array(ensemble_member_value, dtype=np.int32), "realization" + np.array(ensemble_member_value, dtype=np.int32), + "realization", + units="1", ) ) diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index c0a4081970..53d9f4dc35 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -147,6 +147,7 @@ def _convert_vertical_coords( model_level_number, standard_name="model_level_number", attributes={"positive": "down"}, + units="1", ) coords_and_dims.append((coord, dim)) @@ -197,6 +198,7 @@ def _convert_vertical_coords( model_level_number, long_name="soil_model_level_number", attributes={"positive": "down"}, + units="1", ) coords_and_dims.append((coord, dim)) elif np.any(brsvd1 != brlev): @@ -235,6 +237,7 @@ def _convert_vertical_coords( model_level_number, standard_name="model_level_number", attributes={"positive": "up"}, + units="1", ) level_pressure = _dim_or_aux( bhlev, @@ -243,7 +246,10 @@ def _convert_vertical_coords( bounds=np.vstack((bhrlev, brsvd2)).T, ) sigma = AuxCoord( - blev, long_name="sigma", bounds=np.vstack((brlev, brsvd1)).T + blev, + long_name="sigma", + bounds=np.vstack((brlev, brsvd1)).T, + units="1", ) coords_and_dims.extend( [(model_level_number, dim), (level_pressure, dim), (sigma, dim)] @@ -265,6 +271,7 @@ def _convert_vertical_coords( model_level_number, standard_name="model_level_number", attributes={"positive": "up"}, + units="1", ) level_height = _dim_or_aux( blev, @@ -274,7 +281,10 @@ def _convert_vertical_coords( attributes={"positive": "up"}, ) sigma = AuxCoord( - bhlev, long_name="sigma", bounds=np.vstack((bhrlev, brsvd2)).T + bhlev, + long_name="sigma", + bounds=np.vstack((bhrlev, brsvd2)).T, + units="1", ) coords_and_dims.extend( [(model_level_number, dim), (level_height, dim), (sigma, dim)] @@ -627,7 +637,7 @@ def _convert_time_coords( def date2hours(t): epoch_hours = _epoch_date_hours(epoch_hours_unit, t) if t.minute == 0 and t.second == 0: - epoch_hours = round(epoch_hours) + epoch_hours = np.around(epoch_hours) return epoch_hours def date2year(t_in): @@ -846,7 +856,7 @@ def _convert_scalar_realization_coords(lbrsvd4): coords_and_dims = [] if lbrsvd4 != 0: coords_and_dims.append( - (DimCoord(lbrsvd4, standard_name="realization"), None) + (DimCoord(lbrsvd4, standard_name="realization", units="1"), None) ) return coords_and_dims @@ -1078,7 +1088,7 @@ def _all_other_rules(f): and f.lbmon == f.lbmond ): aux_coords_and_dims.append( - (AuxCoord(f.lbmon, long_name="month_number"), None) + (AuxCoord(f.lbmon, long_name="month_number", units="1"), None) ) aux_coords_and_dims.append( ( diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 1e6cac691e..07ed5eb8ce 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -28,7 +28,7 @@ class ConcreteReferenceTarget: """Everything you need to make a real Cube for a named reference.""" def __init__(self, name, transform=None): - #: The name used to connect references with referencees. + #: The name used to connect references with references. self.name = name #: An optional transformation to apply to the cubes. 
self.transform = transform diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 36f79d32d3..31cd862d85 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -334,7 +334,7 @@ def find_saver(filespec): def save(source, target, saver=None, **kwargs): """ - Save one or more Cubes to file (or other writable). + Save one or more Cubes to file (or other writeable). Iris currently supports three file formats for saving, which it can recognise by filename extension: @@ -353,7 +353,7 @@ def save(source, target, saver=None, **kwargs): * source - A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or sequence of cubes. - * target - A filename (or writable, depending on file format). + * target - A filename (or writeable, depending on file format). When given a filename or file, Iris can determine the file format. diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index 6cca135d21..ea2d939280 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -302,12 +302,13 @@ def __init__(self, coord): self._coord = coord # Methods of contained class we need to expose/use. - def _as_defn(self): - return self._coord._as_defn() + @property + def metadata(self): + return self._coord.metadata - # Methods of contained class we want to overide/customise. + # Methods of contained class we want to override/customise. def __eq__(self, other): - return self._coord._as_defn() == other._as_defn() + return self._coord.metadata == other.metadata # Force use of __eq__ for set operations. def __hash__(self): diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 9dff582bc4..36afe906dc 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -168,7 +168,7 @@ def guess_axis(coord): if isinstance(coord, iris.coords.DimCoord) ] if aux_coords: - aux_coords.sort(key=lambda coord: coord._as_defn()) + aux_coords.sort(key=lambda coord: coord.metadata) coords[dim] = aux_coords[0] # If plotting a 2 dimensional plot, check for 2d coordinates @@ -183,7 +183,7 @@ def guess_axis(coord): coord for coord in two_dim_coords if coord.ndim == 2 ] if len(two_dim_coords) >= 2: - two_dim_coords.sort(key=lambda coord: coord._as_defn()) + two_dim_coords.sort(key=lambda coord: coord.metadata) coords = two_dim_coords[:2] if mode == iris.coords.POINT_MODE: diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 66966daaf4..b5b80a97ef 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -21,6 +21,7 @@ import codecs import collections +from collections.abc import Mapping import contextlib import datetime import difflib @@ -76,13 +77,6 @@ else: GDAL_AVAILABLE = True -try: - from iris_grib.message import GribMessage - - GRIB_AVAILABLE = True -except ImportError: - GRIB_AVAILABLE = False - try: import iris_sample_data # noqa except ImportError: @@ -799,7 +793,7 @@ def _unique_id(self): bits[0] = os.path.splitext(file_name)[0] folder, location = os.path.split(path) bits = [location] + bits - while location not in ["iris", "example_tests"]: + while location not in ["iris", "gallery_tests"]: folder, location = os.path.split(folder) bits = [location] + bits test_id = ".".join(bits) @@ -1011,6 +1005,78 @@ def assertArrayShapeStats(self, result, shape, mean, std_dev, rtol=1e-6): self.assertArrayAllClose(result.data.mean(), mean, rtol=rtol) self.assertArrayAllClose(result.data.std(), std_dev, rtol=rtol) + def assertDictEqual(self, lhs, rhs, msg=None): + """ + This method overrides unittest.TestCase.assertDictEqual (new in Python3.1) + in order to cope with dictionary comparison where 
the value of a key may + be a numpy array. + + """ + if not isinstance(lhs, Mapping): + emsg = ( + f"Provided LHS argument is not a 'Mapping', got {type(lhs)}." + ) + self.fail(emsg) + + if not isinstance(rhs, Mapping): + emsg = ( + f"Provided RHS argument is not a 'Mapping', got {type(rhs)}." + ) + self.fail(emsg) + + if set(lhs.keys()) != set(rhs.keys()): + emsg = f"{lhs!r} != {rhs!r}." + self.fail(emsg) + + for key in lhs.keys(): + lvalue, rvalue = lhs[key], rhs[key] + + if ma.isMaskedArray(lvalue) or ma.isMaskedArray(rvalue): + if not ma.isMaskedArray(lvalue): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {type(lvalue)} and " + f"the RHS value has type {ma.core.MaskedArray}." + ) + raise AssertionError(emsg) + + if not ma.isMaskedArray(rvalue): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {ma.core.MaskedArray} and " + f"the RHS value has type {type(rvalue)}." + ) + raise AssertionError(emsg) + + self.assertMaskedArrayEqual(lvalue, rvalue) + elif isinstance(lvalue, np.ndarray) or isinstance( + rvalue, np.ndarray + ): + if not isinstance(lvalue, np.ndarray): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {type(lvalue)} and " + f"the RHS value has type {np.ndarray}." + ) + raise AssertionError(emsg) + + if not isinstance(rvalue, np.ndarray): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {np.ndarray} and " + f"the RHS value has type {type(rvalue)}." + ) + raise AssertionError(emsg) + + self.assertArrayEqual(lvalue, rvalue) + else: + if lvalue != rvalue: + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"{lvalue!r} != {rvalue!r}." + ) + raise AssertionError(emsg) + # An environment variable controls whether test timings are output. # @@ -1181,12 +1247,6 @@ class MyPlotTests(test.GraphicsTest): return skip(fn) -skip_grib = unittest.skipIf( - not GRIB_AVAILABLE, - 'Test(s) require "iris-grib" package, ' "which is not available.", -) - - skip_sample_data = unittest.skipIf( not SAMPLE_DATA_AVAILABLE, ('Test(s) require "iris-sample-data", ' "which is not available."), diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index 0a4d186b39..ba50e389a8 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -9,7 +9,7 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests -from collections import Iterable +from collections.abc import Iterable import tempfile import shutil @@ -377,7 +377,8 @@ def callback(cube, collation, filename): # Make an 'expected' from selected fields, with the expected attribute. expected = CubeList([flds[1], flds[3]]).merge() if not self.do_fast_loads: - expected[0].attributes["LBVC"] = 8 + # This is actually a NumPy int32, so honour that here. + expected[0].attributes["LBVC"] = np.int32(8) else: expected[0].attributes["A_LBVC"] = [8, 8] diff --git a/lib/iris/tests/integration/format_interop/test_name_grib.py b/lib/iris/tests/integration/format_interop/test_name_grib.py deleted file mode 100644 index 63889b879d..0000000000 --- a/lib/iris/tests/integration/format_interop/test_name_grib.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright Iris contributors - # -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details.
-"""Integration tests for NAME to GRIB2 interoperability.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -import numpy as np -import warnings - -import iris - - -def name_cb(cube, field, filename): - # NAME files give the time point at the end of the range but Iris' - # GRIB loader creates it in the middle (the GRIB file itself doesn't - # encode a time point). Here we make them consistent so we can - # easily compare them. - t_coord = cube.coord("time") - t_coord.points = t_coord.bounds[0][1] - fp_coord = cube.coord("forecast_period") - fp_coord.points = fp_coord.bounds[0][1] - # NAME contains extra vertical meta-data. - z_coord = cube.coords("height") - if z_coord: - z_coord[0].standard_name = "height" - z_coord[0].long_name = "height above ground level" - - -@tests.skip_grib -class TestNameToGRIB(tests.IrisTest): - def check_common(self, name_cube, grib_cube): - self.assertTrue(np.allclose(name_cube.data, name_cube.data)) - self.assertTrue( - np.allclose( - name_cube.coord("latitude").points, - grib_cube.coord("latitude").points, - ) - ) - self.assertTrue( - np.allclose( - name_cube.coord("longitude").points, - grib_cube.coord("longitude").points - 360, - ) - ) - - for c in ["height", "time"]: - if name_cube.coords(c): - self.assertEqual(name_cube.coord(c), grib_cube.coord(c)) - - @tests.skip_data - def test_name2_field(self): - filepath = tests.get_data_path(("NAME", "NAMEII_field.txt")) - name_cubes = iris.load(filepath) - - # There is a known load/save problem with numerous - # gribapi/eccodes versions and - # zero only data, where min == max. - # This may be a problem with data scaling. - for i, name_cube in enumerate(name_cubes): - data = name_cube.data - if np.min(data) == np.max(data): - msg = ( - 'NAMEII cube #{}, "{}" has empty data : ' - "SKIPPING test for this cube, as save/load will " - "not currently work." - ) - warnings.warn(msg.format(i, name_cube.name())) - continue - - with self.temp_filename(".grib2") as temp_filename: - iris.save(name_cube, temp_filename) - grib_cube = iris.load_cube(temp_filename, callback=name_cb) - self.check_common(name_cube, grib_cube) - self.assertCML( - grib_cube, - tests.get_result_path( - ( - "integration", - "name_grib", - "NAMEII", - "{}_{}.cml".format(i, name_cube.name()), - ) - ), - ) - - @tests.skip_data - def test_name3_field(self): - filepath = tests.get_data_path(("NAME", "NAMEIII_field.txt")) - name_cubes = iris.load(filepath) - for i, name_cube in enumerate(name_cubes): - with self.temp_filename(".grib2") as temp_filename: - iris.save(name_cube, temp_filename) - grib_cube = iris.load_cube(temp_filename, callback=name_cb) - - self.check_common(name_cube, grib_cube) - self.assertCML( - grib_cube, - tests.get_result_path( - ( - "integration", - "name_grib", - "NAMEIII", - "{}_{}.cml".format(i, name_cube.name()), - ) - ), - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/format_interop/test_pp_grib.py b/lib/iris/tests/integration/format_interop/test_pp_grib.py deleted file mode 100644 index 70d89f834a..0000000000 --- a/lib/iris/tests/integration/format_interop/test_pp_grib.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Integration tests for PP/GRIB interoperability.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -import iris - - -@tests.skip_grib -class TestBoundedTime(tests.IrisTest): - @tests.skip_data - def test_time_and_forecast_period_round_trip(self): - pp_path = tests.get_data_path( - ("PP", "meanMaxMin", "200806081200__qwpb.T24.pp") - ) - # Choose the first time-bounded Cube in the PP dataset. - original = [ - cube - for cube in iris.load(pp_path) - if cube.coord("time").has_bounds() - ][0] - # Save it to GRIB2 and re-load. - with self.temp_filename(".grib2") as grib_path: - iris.save(original, grib_path) - from_grib = iris.load_cube(grib_path) - # Avoid the downcasting warning when saving to PP. - from_grib.data = from_grib.data.astype("f4") - # Re-save to PP and re-load. - with self.temp_filename(".pp") as pp_path: - iris.save(from_grib, pp_path) - from_pp = iris.load_cube(pp_path) - self.assertEqual(original.coord("time"), from_grib.coord("time")) - self.assertEqual( - original.coord("forecast_period"), - from_grib.coord("forecast_period"), - ) - self.assertEqual(original.coord("time"), from_pp.coord("time")) - self.assertEqual( - original.coord("forecast_period"), from_pp.coord("forecast_period") - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py index 8c6e0f6659..267e5beb50 100644 --- a/lib/iris/tests/integration/test_netcdf.py +++ b/lib/iris/tests/integration/test_netcdf.py @@ -81,7 +81,9 @@ def test_hybrid_height_and_pressure(self): 1200.0, long_name="level_pressure", units="hPa" ) ) - cube.add_aux_coord(iris.coords.DimCoord(0.5, long_name="other sigma")) + cube.add_aux_coord( + iris.coords.DimCoord(0.5, long_name="other sigma", units="1") + ) cube.add_aux_coord( iris.coords.DimCoord( 1000.0, long_name="surface_air_pressure", units="hPa" diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index 6fbf180ac5..b9b096d782 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -299,7 +299,7 @@ def test_hybrid_height_with_non_standard_coords(self): delta_lower, delta, delta_upper = 150, 200, 250 cube = Cube(np.zeros((ny, nx)), "air_temperature") - level_coord = AuxCoord(0, "model_level_number") + level_coord = AuxCoord(0, "model_level_number", units="1") cube.add_aux_coord(level_coord) delta_coord = AuxCoord( delta, @@ -308,7 +308,10 @@ def test_hybrid_height_with_non_standard_coords(self): units="m", ) sigma_coord = AuxCoord( - sigma, bounds=[[sigma_lower, sigma_upper]], long_name="mavis" + sigma, + bounds=[[sigma_lower, sigma_upper]], + long_name="mavis", + units="1", ) surface_altitude_coord = AuxCoord( np.zeros((ny, nx)), "surface_altitude", units="m" @@ -343,7 +346,7 @@ def test_hybrid_pressure_with_non_standard_coords(self): delta_lower, delta, delta_upper = 0.15, 0.2, 0.25 cube = Cube(np.zeros((ny, nx)), "air_temperature") - level_coord = AuxCoord(0, "model_level_number") + level_coord = AuxCoord(0, "model_level_number", units="1") cube.add_aux_coord(level_coord) delta_coord = AuxCoord( delta, @@ -352,7 +355,10 @@ def test_hybrid_pressure_with_non_standard_coords(self): units="Pa", ) sigma_coord = AuxCoord( - sigma, bounds=[[sigma_lower, sigma_upper]], long_name="mavis" + sigma, + bounds=[[sigma_lower, sigma_upper]], + long_name="mavis", + units="1", ) surface_air_pressure_coord = AuxCoord( np.zeros((ny, nx)), 
"surface_air_pressure", units="Pa" diff --git a/lib/iris/tests/results/analysis/abs.cml b/lib/iris/tests/results/analysis/abs.cml index e92f96e1cb..b0a37b6074 100644 --- a/lib/iris/tests/results/analysis/abs.cml +++ b/lib/iris/tests/results/analysis/abs.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition.cml b/lib/iris/tests/results/analysis/addition.cml index d673e73bb3..4f9600694d 100644 --- a/lib/iris/tests/results/analysis/addition.cml +++ b/lib/iris/tests/results/analysis/addition.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_coord_x.cml b/lib/iris/tests/results/analysis/addition_coord_x.cml index af0c5ecc91..a086b8ad8b 100644 --- a/lib/iris/tests/results/analysis/addition_coord_x.cml +++ b/lib/iris/tests/results/analysis/addition_coord_x.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_coord_y.cml b/lib/iris/tests/results/analysis/addition_coord_y.cml index ba8547b617..266e81c912 100644 --- a/lib/iris/tests/results/analysis/addition_coord_y.cml +++ b/lib/iris/tests/results/analysis/addition_coord_y.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_different_std_name.cml b/lib/iris/tests/results/analysis/addition_different_std_name.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/addition_different_std_name.cml +++ b/lib/iris/tests/results/analysis/addition_different_std_name.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_in_place.cml b/lib/iris/tests/results/analysis/addition_in_place.cml index d673e73bb3..4f9600694d 100644 --- a/lib/iris/tests/results/analysis/addition_in_place.cml +++ b/lib/iris/tests/results/analysis/addition_in_place.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_in_place_coord.cml b/lib/iris/tests/results/analysis/addition_in_place_coord.cml index 6ec39571c1..00dee609eb 100644 --- a/lib/iris/tests/results/analysis/addition_in_place_coord.cml +++ b/lib/iris/tests/results/analysis/addition_in_place_coord.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_scalar.cml b/lib/iris/tests/results/analysis/addition_scalar.cml index d65d7492fe..daf0050069 100644 --- a/lib/iris/tests/results/analysis/addition_scalar.cml +++ b/lib/iris/tests/results/analysis/addition_scalar.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ifunc.cml b/lib/iris/tests/results/analysis/apply_ifunc.cml index f2bac40826..fe0e394ee6 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml b/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml index 2faa06f4a5..29cb6f611e 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ufunc.cml b/lib/iris/tests/results/analysis/apply_ufunc.cml index f2bac40826..fe0e394ee6 100644 --- a/lib/iris/tests/results/analysis/apply_ufunc.cml +++ b/lib/iris/tests/results/analysis/apply_ufunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml b/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml index d4239acbad..7b1511f028 100644 --- a/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml +++ 
b/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division.cml b/lib/iris/tests/results/analysis/division.cml index bbe6c1eb90..762f51ec0a 100644 --- a/lib/iris/tests/results/analysis/division.cml +++ b/lib/iris/tests/results/analysis/division.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_array.cml b/lib/iris/tests/results/analysis/division_by_array.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/division_by_array.cml +++ b/lib/iris/tests/results/analysis/division_by_array.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_latitude.cml b/lib/iris/tests/results/analysis/division_by_latitude.cml index 3e2abf69cd..42437d1e36 100644 --- a/lib/iris/tests/results/analysis/division_by_latitude.cml +++ b/lib/iris/tests/results/analysis/division_by_latitude.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_longitude.cml b/lib/iris/tests/results/analysis/division_by_longitude.cml index b1a0228dc8..264ce9b793 100644 --- a/lib/iris/tests/results/analysis/division_by_longitude.cml +++ b/lib/iris/tests/results/analysis/division_by_longitude.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_singular_coord.cml b/lib/iris/tests/results/analysis/division_by_singular_coord.cml index 7f7835a1be..4c9c58d760 100644 --- a/lib/iris/tests/results/analysis/division_by_singular_coord.cml +++ b/lib/iris/tests/results/analysis/division_by_singular_coord.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_scalar.cml b/lib/iris/tests/results/analysis/division_scalar.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/division_scalar.cml +++ b/lib/iris/tests/results/analysis/division_scalar.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/exponentiate.cml b/lib/iris/tests/results/analysis/exponentiate.cml index a13c6be151..bb825f6714 100644 --- a/lib/iris/tests/results/analysis/exponentiate.cml +++ b/lib/iris/tests/results/analysis/exponentiate.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml b/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml index a9e69c291e..f027f2d9f8 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_1d.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml b/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml index a9e69c291e..f027f2d9f8 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_1d_fast_percentile.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml b/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml index 34c9e746f6..1bc809ce63 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_2d.cml @@ -11,7 +11,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml b/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml index 34c9e746f6..1bc809ce63 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml +++ 
b/lib/iris/tests/results/analysis/first_quartile_foo_2d_fast_percentile.cml @@ -11,7 +11,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml index b3f135cede..cadd1e8b65 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d.cml @@ -9,7 +9,7 @@ - + diff --git a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml index b3f135cede..cadd1e8b65 100644 --- a/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/first_quartile_foo_bar_2d_fast_percentile.cml @@ -9,7 +9,7 @@ - + diff --git a/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml b/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml index 80fab0e150..059541e208 100644 --- a/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml +++ b/lib/iris/tests/results/analysis/last_quartile_foo_3d_masked.cml @@ -9,7 +9,7 @@ - + diff --git a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml index 80fab0e150..059541e208 100644 --- a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml +++ b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked.cml @@ -9,7 +9,7 @@ - + diff --git a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml index 80fab0e150..059541e208 100644 --- a/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/last_quartile_foo_3d_notmasked_fast_percentile.cml @@ -9,7 +9,7 @@ - + diff --git a/lib/iris/tests/results/analysis/log.cml b/lib/iris/tests/results/analysis/log.cml index 33214d01f1..c24e071dc5 100644 --- a/lib/iris/tests/results/analysis/log.cml +++ b/lib/iris/tests/results/analysis/log.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/log10.cml b/lib/iris/tests/results/analysis/log10.cml index fbee8f73f0..abd4065526 100644 --- a/lib/iris/tests/results/analysis/log10.cml +++ b/lib/iris/tests/results/analysis/log10.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/log2.cml b/lib/iris/tests/results/analysis/log2.cml index 6371f3925b..d121ad9a9d 100644 --- a/lib/iris/tests/results/analysis/log2.cml +++ b/lib/iris/tests/results/analysis/log2.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/multiply.cml b/lib/iris/tests/results/analysis/multiply.cml index 44996a9138..8fb8658f5d 100644 --- a/lib/iris/tests/results/analysis/multiply.cml +++ b/lib/iris/tests/results/analysis/multiply.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/multiply_different_std_name.cml b/lib/iris/tests/results/analysis/multiply_different_std_name.cml index 49f1779b77..2d89e5882f 100644 --- a/lib/iris/tests/results/analysis/multiply_different_std_name.cml +++ b/lib/iris/tests/results/analysis/multiply_different_std_name.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/sqrt.cml b/lib/iris/tests/results/analysis/sqrt.cml index 3a7bff138c..0dd0fe20b3 100644 --- a/lib/iris/tests/results/analysis/sqrt.cml +++ b/lib/iris/tests/results/analysis/sqrt.cml @@ -1,6 +1,9 @@ + + + diff --git 
a/lib/iris/tests/results/analysis/subtract.cml b/lib/iris/tests/results/analysis/subtract.cml index 7b0740888d..3466578756 100644 --- a/lib/iris/tests/results/analysis/subtract.cml +++ b/lib/iris/tests/results/analysis/subtract.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_array.cml b/lib/iris/tests/results/analysis/subtract_array.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/subtract_array.cml +++ b/lib/iris/tests/results/analysis/subtract_array.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_coord_x.cml b/lib/iris/tests/results/analysis/subtract_coord_x.cml index c7aee8395b..060814c6ba 100644 --- a/lib/iris/tests/results/analysis/subtract_coord_x.cml +++ b/lib/iris/tests/results/analysis/subtract_coord_x.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_coord_y.cml b/lib/iris/tests/results/analysis/subtract_coord_y.cml index 355692b27b..4a9351cf6f 100644 --- a/lib/iris/tests/results/analysis/subtract_coord_y.cml +++ b/lib/iris/tests/results/analysis/subtract_coord_y.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_scalar.cml b/lib/iris/tests/results/analysis/subtract_scalar.cml index ab8e9d0d60..f458364143 100644 --- a/lib/iris/tests/results/analysis/subtract_scalar.cml +++ b/lib/iris/tests/results/analysis/subtract_scalar.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml b/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml index b14c51cfb3..038e7c8668 100644 --- a/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml +++ b/lib/iris/tests/results/analysis/third_quartile_foo_1d.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml b/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml index b14c51cfb3..038e7c8668 100644 --- a/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml +++ b/lib/iris/tests/results/analysis/third_quartile_foo_1d_fast_percentile.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/coord_api/minimal.xml b/lib/iris/tests/results/coord_api/minimal.xml index a35c93dc68..8f93fb6376 100644 --- a/lib/iris/tests/results/coord_api/minimal.xml +++ b/lib/iris/tests/results/coord_api/minimal.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/derived/removed_derived_coord.cml b/lib/iris/tests/results/derived/removed_derived_coord.cml new file mode 100644 index 0000000000..12feb2b643 --- /dev/null +++ b/lib/iris/tests/results/derived/removed_derived_coord.cml @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 884e2b875f..a353507d12 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -1,128 +1,129 @@ { - "example_tests.test_COP_1d_plot.TestCOP1DPlot.test_COP_1d_plot.0": [ + "gallery_tests.test_plot_COP_1d.TestCOP1DPlot.test_plot_COP_1d.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/baff589936602d8ec977334ae4dac9b61a6dc4d99532c86cc2913e36c4cc0f61.png", "https://scitools.github.io/test-iris-imagehash/images/v4/aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33.png" ], - "example_tests.test_COP_maps.TestCOPMaps.test_cop_maps.0": [ + "gallery_tests.test_plot_COP_maps.TestCOPMaps.test_plot_cop_maps.0": [ 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea9138db95668524913e6ac168997e85957e917e876396b96a81b5ce3c496935.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913c6ac178995b0d956e917ec76396b96a853dcf94696935.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94796931.png" ], - "example_tests.test_SOI_filtering.TestSOIFiltering.test_soi_filtering.0": [ + "gallery_tests.test_plot_SOI_filtering.TestSOIFiltering.test_plot_soi_filtering.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fac460b9c17b78723e05a5a9954edaf062332799954e9ca5c63b9a52d24e5a95.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa8460b9c17b78723e05a5a9954edaf062333799954e9ca5c63b9a52d24e4a9d.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa167295c5e0696a3c17a58c9568da536233da19994cdab487739b4b9b444eb5.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4.png" ], - "example_tests.test_TEC.TestTEC.test_TEC.0": [ + "gallery_tests.test_plot_TEC.TestTEC.test_plot_TEC.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9a42846e9a49c7596e3cce6c907b3a83c17e1b8239b3e4f33bc4.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9e43846e9a49c7596e2cce6c907b3a83c16e1b9231b3e4f33b8c.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c.png" ], - "example_tests.test_anomaly_log_colouring.TestAnomalyLogColouring.test_anomaly_log_colouring.0": [ + "gallery_tests.test_plot_anomaly_log_colouring.TestAnomalyLogColouring.test_plot_anomaly_log_colouring.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ec4464e185a39f93931e9b1e91696d2949dde6e63e26a47a5ad391938d9a5a0c.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ecc164e78e979b19b3789b0885a564a56cc2c65e3ec69469db1bdb9a853c1e24.png" ], - "example_tests.test_atlantic_profiles.TestAtlanticProfiles.test_atlantic_profiles.0": [ + "gallery_tests.test_plot_atlantic_profiles.TestAtlanticProfiles.test_plot_atlantic_profiles.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc5d08fcd00fdb1c93fcb21c.png", "https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc7c09f4d00fdb1c93fcb21c.png", "https://scitools.github.io/test-iris-imagehash/images/v4/9f8a60536bd28e1320739437b5f437b0a53d66f4cc5c08f4d00fdb1c93fcb21c.png", "https://scitools.github.io/test-iris-imagehash/images/v4/9fc060f462a08f07203ebc77a1f36707e61f4e38d8f7d08a910197fc877cec58.png", "https://scitools.github.io/test-iris-imagehash/images/v4/97c160f462a88f07203ebc77a1e36707e61f4e38d8f3d08a910597fc877cec58.png" ], - "example_tests.test_atlantic_profiles.TestAtlanticProfiles.test_atlantic_profiles.1": [ + "gallery_tests.test_plot_atlantic_profiles.TestAtlanticProfiles.test_plot_atlantic_profiles.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/a6eaa57e6e81ddf999311ba3b3775e20845d5889c199673b4e22a4675e8ca11c.png", "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991f1322b3761e06845718d89995b3131f32a4765ec2a1cd.png", "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991d1322b3741e2684571cd89995b3131f32a4765ee2a1cc.png" ], - "example_tests.test_coriolis_plot.TestCoriolisPlot.test_coriolis_plot.0": [ + 
"gallery_tests.test_plot_coriolis.TestCoriolisPlot.test_plot_coriolis.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/e78665de9a699659e55e9965886979966986c5e63e98c19e3a256679e1981a24.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e68665de9a699659c1fe99a5896965966996c46e3e19c1da3a652669c51e1a26.png" ], - "example_tests.test_cross_section.TestCrossSection.test_cross_section.0": [ + "gallery_tests.test_plot_cross_section.TestCrossSection.test_plot_cross_section.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ea95317b9562e4d1649f5a05856e4ca4da52947e4ea5f13f1b499d42f13b1b41.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ea91b17b9562e4d1609f5a05856e4ca45a52957e5ea5f13b1bca9dc0b17b1ac1.png" ], - "example_tests.test_cross_section.TestCrossSection.test_cross_section.1": [ + "gallery_tests.test_plot_cross_section.TestCrossSection.test_plot_cross_section.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ea9521fb956a394069921e93f07f4aad856cc47e4e95857a1ea5da3591ba1b81.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ea9521fb956a394068931e9be07e4aa5856cc47e4a91957a1ba55bb5b17a3b81.png" ], - "example_tests.test_custom_aggregation.TestCustomAggregation.test_custom_aggregation.0": [ + "gallery_tests.test_plot_custom_aggregation.TestCustomAggregation.test_plot_custom_aggregation.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fe816e81917e907eb43e873f85677ac190f0703c6a95811f1ac33ce1a57a6f18.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fe816e81817e907eb43e873f85637ac198d8703c6a94811f1ac73ee1a57a6f90.png" ], - "example_tests.test_custom_file_loading.TestCustomFileLoading.test_custom_file_loading.0": [ + "gallery_tests.test_plot_custom_file_loading.TestCustomFileLoading.test_plot_custom_file_loading.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/faa0cbf1845e34be913787416edcc8bc3bc81f9b63332662a4ed30cdc1b2cd21.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fba0cbf1845e34be912787416edcc8bc3b881f9b62332762a5ad32cdc1b2cd21.png", "https://scitools.github.io/test-iris-imagehash/images/v4/faa1cb47845e34bc912797436cccc8343f11359b73523746c48c72d9d9b34da5.png" ], - "example_tests.test_deriving_phenomena.TestDerivingPhenomena.test_deriving_phenomena.0": [ + "gallery_tests.test_plot_deriving_phenomena.TestDerivingPhenomena.test_plot_deriving_phenomena.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/b9993986866952e6c9464639c4766bd9c669916e7b99c1663f99768990763e81.png", "https://scitools.github.io/test-iris-imagehash/images/v4/b99139de866952e6c946c639c47e6bd18769d16e7a9981662e813699d0763e89.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ec97681793689768943c97e8926669d186e8c33f6c99c32e6b936c83d33e2c98.png" ], - "example_tests.test_global_map.TestGlobalMap.test_global_map.0": [ + "gallery_tests.test_plot_global_map.TestGlobalMap.test_plot_global_map.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fa9979468566857ef07e3e8978566b91cb0179883c89946686a96b9d83766f81.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa997b958466846ed13e87467a997a898d66d17e2cc9906684696f99d3162f81.png" ], - "example_tests.test_hovmoller.TestGlobalMap.test_hovmoller.0": [ + "gallery_tests.test_plot_hovmoller.TestGlobalMap.test_plot_hovmoller.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/bab430b4ce4bce43c5becf89c54b1a63c543c56e1e64907e3bb469b490de1ac1.png", 
"https://scitools.github.io/test-iris-imagehash/images/v4/eeb46cb4934b934bc07e974bc14b38949943c0fe3e94c17f6ea46cb4c07b3f00.png" ], - "example_tests.test_inset_plot.TestInsetPlot.test_inset_plot.0": [ + "gallery_tests.test_plot_inset.TestInsetPlot.test_plot_inset.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ebff6992f50096a5b245dac4f6559496b49248dbc95dcb699529912dcf244a54.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e9ff6992b50096a5b245dac4f64594b6b49248dbc95dcb699529952dcf244a56.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ebff6992b50096ad9267dac4d64094b294924cdbc95d4b699d29952dcda46e94.png" ], - "example_tests.test_lagged_ensemble.TestLaggedEnsemble.test_lagged_ensemble.0": [ + "gallery_tests.test_plot_lagged_ensemble.TestLaggedEnsemble.test_plot_lagged_ensemble.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/bbbb31e1c44e64e4b0459b5bb1716ecac464f496ce34618eb1079b39b193ce25.png", "https://scitools.github.io/test-iris-imagehash/images/v4/bbbb31b1c44e64e4b1579b5b917133cecc61f146c414668eb1119b1bb197ce34.png" ], - "example_tests.test_lagged_ensemble.TestLaggedEnsemble.test_lagged_ensemble.1": [ + "gallery_tests.test_plot_lagged_ensemble.TestLaggedEnsemble.test_plot_lagged_ensemble.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/abfef958fd462c993a07d87960464b81d1009687c139d3b594e9cf87c6b89687.png", "https://scitools.github.io/test-iris-imagehash/images/v4/aafec5e9e5e03e099a07e0f86542db879438261ec3b13ce78d8dc65a92d83d89.png" ], - "example_tests.test_lineplot_with_legend.TestLineplotWithLegend.test_lineplot_with_legend.0": [ + "gallery_tests.test_plot_lineplot_with_legend.TestLineplotWithLegend.test_plot_lineplot_with_legend.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/eae942526540b869961f8da694589da69543cc9af1014afbc3fd596b84fe19a7.png", "https://scitools.github.io/test-iris-imagehash/images/v4/eae942146540b869961f8de694589da69543cc9af1014afbc3fd596b84fe19a7.png", "https://scitools.github.io/test-iris-imagehash/images/v4/eafd9e12a5a061e9925ec716de489e9685078ec981b229e70ddb79219cc3768d.png" ], - "example_tests.test_load_nemo.TestLoadNemo.test_load_nemo.0": [ + "gallery_tests.test_plot_load_nemo.TestLoadNemo.test_plot_load_nemo.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/a3ff34e87f0049496d17c4d9c04fc225d256971392d39f1696df0f16cec00f36.png" ], - "example_tests.test_orca_projection.TestOrcaProjection.test_orca_projection.0": [ + "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fb11731a94cea4ee64b35e91d1d2304e9e5ac7397b20e1fe12852487e666ce46.png", "https://scitools.github.io/test-iris-imagehash/images/v4/bb11721a87cce5e4cce79e81d19b3b5e1e1cd3783168e07835853485e65e2e1e.png" ], - "example_tests.test_orca_projection.TestOrcaProjection.test_orca_projection.1": [ + "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/e5a665a69a599659e5db1865c2653b869996cce63e99e19a1a912639e7181e65.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e58661969e799659c1f719a6c867359a1996c0773649c09c3e612679c07b3f66.png" ], - "example_tests.test_orca_projection.TestOrcaProjection.test_orca_projection.2": [ + "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.2": [ 
"https://scitools.github.io/test-iris-imagehash/images/v4/f2c464ce9e399332e1b74ce1cc79338c6586e5b33b31b37a66c9664cc06e1a64.png", "https://scitools.github.io/test-iris-imagehash/images/v4/a58660ce9e739b31c93d1cc9c8df33863383e33b3f11c03f2664366cc8ee3cc1.png" ], - "example_tests.test_orca_projection.TestOrcaProjection.test_orca_projection.3": [ + "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.3": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a83846ea46ce539c93391de32cc86cf87a33fa168721cdb3e896e374b04.png", "https://scitools.github.io/test-iris-imagehash/images/v4/be817a87845ea56cec79817a919e338436a5c1e73fa16c736c4a3e816a1e6b1c.png" ], - "example_tests.test_polar_stereo.TestPolarStereo.test_polar_stereo.0": [ + "gallery_tests.test_plot_polar_stereo.TestPolarStereo.test_plot_polar_stereo.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/e168317a92d36d89c5bb9e94c55e6f0c9a93c15a6ec584763b21716791de3a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9e16079971e9e93c8ce0f84c31e3b929f92c0ff3ca1c17e39e03961c07e3f80.png" + "https://scitools.github.io/test-iris-imagehash/images/v4/b9e16079971e9e93c8ce0f84c31e3b929f92c0ff3ca1c17e39e03961c07e3f80.png", + "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e615ec7e097a9961f9cb190f838e091c2c1e73f07c11f6f386b3cc1783e11.png" ], - "example_tests.test_polynomial_fit.TestPolynomialFit.test_polynomial_fit.0": [ + "gallery_tests.test_plot_polynomial_fit.TestPolynomialFit.test_plot_polynomial_fit.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/abff4a9df26435886520c97f12414695c4b69d23934bc86adc969237d68ccc6f.png", "https://scitools.github.io/test-iris-imagehash/images/v4/aaff4a9df26435886520c97f12414695c4b69d23934bc86adc969a17d69ccc6f.png", "https://scitools.github.io/test-iris-imagehash/images/v4/aeffcb34d244348be5a2c96c3a4fc6d0c4b69f2d87294ccb9f1a125684cd7c11.png" ], - "example_tests.test_projections_and_annotations.TestProjectionsAndAnnotations.test_projections_and_annotations.0": [ + "gallery_tests.test_plot_projections_and_annotations.TestProjectionsAndAnnotations.test_plot_projections_and_annotations.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fa854f19851a30e4cc76cd0bb179325ca7c665b0c938cb4b4e719e9cb727b5c0.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fac54f19851a30e4cc76cd0bb179325cb78665b0c938cb4b4e719e9c9727b5c0.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa854e19851a30e4cc76cd0bb179325cb7c664b0c938cb4bce739e9c37a3b5c0.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa854e19851a30e4cc76cd0bb179325cb78665b1c938c94bce739e9c3727b5c0.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa854f19851a30e4cc76cd0bb0f932dca7c665b1c92ccb4b4ed19e9c3721b5c8.png" ], - "example_tests.test_projections_and_annotations.TestProjectionsAndAnnotations.test_projections_and_annotations.1": [ + "gallery_tests.test_plot_projections_and_annotations.TestProjectionsAndAnnotations.test_plot_projections_and_annotations.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/e385699d9c3896627243318fcdad5a7dc6dba492e9b69964936dc21974b18592.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e385699d9c3896727243318f8dad5a7dc65ba492b93699649b6dc25b64938592.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e385699d9c3896627243318fcdad5a7dc6dba492b93699649b6dc25964938592.png", @@ -130,29 +131,29 @@ 
"https://scitools.github.io/test-iris-imagehash/images/v4/e3856b999c3896727243318f8dad5a75865ba492e9b69964db6cc65b74918592.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e3856d999c389662734731afcdad5a7384daa592b1b69b64d26dc29974b18590.png" ], - "example_tests.test_rotated_pole_mapping.TestRotatedPoleMapping.test_rotated_pole_mapping.0": [ + "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fa15615e97a193adc15e1e81c4fa3eb49d30817e3e05c17e7ba59927817e1e01.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ee46607e97a19781c0df1f81d0bb3e241f20c16f3fc0c1fe39263d33d06f3e80.png" ], - "example_tests.test_rotated_pole_mapping.TestRotatedPoleMapping.test_rotated_pole_mapping.1": [ + "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ba056717c3e099e9b90f8e81c4da589499b696763e45e56b3b893929c17b7e01.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ea57685f95a886a1c0de9da090be3e2697e1c0ff3f00c17e6b266c17c07f3f00.png" ], - "example_tests.test_rotated_pole_mapping.TestRotatedPoleMapping.test_rotated_pole_mapping.2": [ + "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.2": [ "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e605ec7a191a1b85e9e81c4da58909996b37e3a65e16f7b817939e57a1e01.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e605ec7a193a1b85e9e81c4da58909996b3763a65e16f7b816939ed7a1e01.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e85a697e97a18681c6da9f8190bf3e263624c1ef3b48c17a2b223c47c0ff3f81.png", "https://scitools.github.io/test-iris-imagehash/images/v4/ea57685f95a886a1c0de9da090be3e2497e1c0ef3f01c17e6b366c17c07b3f01.png" ], - "example_tests.test_rotated_pole_mapping.TestRotatedPoleMapping.test_rotated_pole_mapping.3": [ + "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.3": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fa8172d0847ecd2bc913939c36846c714933799cc3cc8727e67639f939996a58.png", "https://scitools.github.io/test-iris-imagehash/images/v4/fa8172c6857ecd38cb3392ce36c564311931d85ec64e9787719a39993c316e66.png" ], - "example_tests.test_wind_speed.TestWindSpeed.test_wind_speed.0": [ + "gallery_tests.test_plot_wind_speed.TestWindSpeed.test_plot_wind_speed.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/bcf924fb9306930ce12ccf97c73236b28ecec4cd3e29847b18e639e6c14f1a09.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169306c1ee9e96c29e36739e13c07d3d61c07f39a139a1c07f3f01.png" ], - "example_tests.test_wind_speed.TestWindSpeed.test_wind_speed.1": [ + "gallery_tests.test_plot_wind_speed.TestWindSpeed.test_plot_wind_speed.1": [ "https://scitools.github.io/test-iris-imagehash/images/v4/bcf924fb9306930ce12ccf97c73236b28ecec4cc3e29847b38e639e6c14f1a09.png", "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169306c1ee9e86c29e36739e13c07d3d61c07f39a139a1c17f3f01.png" ], diff --git a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl index 1740926645..1f6bc36832 100644 --- a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl +++ 
b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl @@ -17,11 +17,11 @@ variables: double time_climatology(time, bnds) ; double latitude(latitude) ; latitude:axis = "Y" ; - latitude:units = "1" ; + latitude:units = "degrees_north" ; latitude:standard_name = "latitude" ; double longitude(longitude) ; longitude:axis = "X" ; - longitude:units = "1" ; + longitude:units = "degrees_east" ; longitude:standard_name = "longitude" ; // global attributes: diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml deleted file mode 100644 index b0daf50907..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml deleted file mode 100644 index aef4988ce6..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml deleted file mode 100644 index 5787c19643..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml deleted file mode 100644 index 5787c19643..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml deleted file mode 100644 index 1a31427de0..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml deleted file mode 100644 index 7007836e62..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml deleted file mode 100644 index 850ef89ed2..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml 
b/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml deleted file mode 100644 index ade4cea92d..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml deleted file mode 100644 index 088b622c46..0000000000 --- a/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml index 39cb8f2950..e48cf41d2a 100644 --- a/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml index 1c59fc947e..78fec459e9 100644 --- a/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml b/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml index 8dd0e43b71..ca4a0eb017 100644 --- a/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml +++ b/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml @@ -1,6 +1,6 @@ - + @@ -20,7 +20,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -66,7 +66,7 @@ - + @@ -89,7 +89,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -131,7 +131,7 @@ - + @@ -150,7 +150,7 @@ - + @@ -170,7 +170,7 @@ - + @@ -190,7 +190,7 @@ - + @@ -213,7 +213,7 @@ - + @@ -232,7 +232,7 @@ - + @@ -252,7 +252,7 @@ - + @@ -272,7 +272,7 @@ - + @@ -295,7 +295,7 @@ - + @@ -320,7 +320,7 @@ - + @@ -333,7 +333,7 @@ - + @@ -346,7 +346,7 @@ - + @@ -359,7 +359,7 @@ - + @@ -372,7 +372,7 @@ - + @@ -385,7 +385,7 @@ - + @@ -404,7 +404,7 @@ - + @@ -424,7 +424,7 @@ - + @@ -447,7 +447,7 @@ - + @@ -460,7 +460,7 @@ - + @@ -473,7 +473,7 @@ - + @@ -486,7 +486,7 @@ - + @@ -499,7 +499,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml index 27d4569236..ac41f4a8b8 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml @@ -13,11 +13,11 @@ - + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> @@ -39,11 +39,11 @@ - + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml index d677191beb..4234b5cc84 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml @@ -13,11 +13,11 @@ - + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml index 
775f480c66..17d87a0190 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml @@ -13,11 +13,11 @@ - + 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl b/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl index e67316b2f7..f1399e88b3 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl @@ -6,11 +6,9 @@ variables: double unknown(dim0, dim1) ; unknown:coordinates = "unknown_scalar" ; double dim0(dim0) ; - dim0:units = "1" ; double dim1(dim1) ; dim1:units = "m" ; char unknown_scalar(string6) ; - unknown_scalar:units = "no_unit" ; // global attributes: :Conventions = "CF-1.7" ; diff --git a/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml index 39cb8f2950..e48cf41d2a 100644 --- a/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml index 1c59fc947e..78fec459e9 100644 --- a/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml @@ -6,7 +6,7 @@ - + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml index c6e6271a63..bea6795b38 100644 --- 
a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml +++ 
b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml +++ 
b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl index 3646627746..3c1033c17e 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl @@ -17,10 +17,10 @@ variables: double time_climatology(time, bnds) ; double latitude(latitude) ; latitude:axis = "Y" ; - latitude:units = "1" ; + latitude:units = "degrees_north" ; latitude:standard_name = "latitude" ; double longitude(longitude) ; longitude:axis = "X" ; - longitude:units = "1" ; + longitude:units = "degrees_east" ; longitude:standard_name = "longitude" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml index 3ea688d1fa..0bf359e9c4 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml @@ -1,6 +1,6 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml index 829c7ce38e..e5cec55565 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml @@ -1,6 +1,6 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml index 4f84609832..b484ebb305 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml +++ 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml @@ -1,6 +1,6 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml index caafa5845c..c594c748cd 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml @@ -1,6 +1,6 @@ - + diff --git a/lib/iris/tests/runner/_runner.py b/lib/iris/tests/runner/_runner.py index 41d27bbfe3..71b6e5fcc6 100644 --- a/lib/iris/tests/runner/_runner.py +++ b/lib/iris/tests/runner/_runner.py @@ -21,7 +21,7 @@ class TestRunner: description = ( "Run tests under nose and multiprocessor for performance. " - "Default behaviour is to run all non-example tests. " + "Default behaviour is to run all non-gallery tests. " "Specifying one or more test flags will run *only* those " "tests." ) @@ -34,7 +34,7 @@ class TestRunner: ), ("stop", "x", "Stop running tests after the first error or failure."), ("system-tests", "s", "Run the limited subset of system tests."), - ("example-tests", "e", "Run the example code tests."), + ("gallery-tests", "e", "Run the gallery code tests."), ("default-tests", "d", "Run the default tests."), ( "coding-tests", @@ -53,7 +53,7 @@ class TestRunner: "no-data", "system-tests", "stop", - "example-tests", + "gallery-tests", "default-tests", "coding-tests", "create-missing", @@ -63,7 +63,7 @@ def initialize_options(self): self.no_data = False self.stop = False self.system_tests = False - self.example_tests = False + self.gallery_tests = False self.default_tests = False self.coding_tests = False self.num_processors = None @@ -87,8 +87,8 @@ def finalize_options(self): tests.append("default") if self.coding_tests: tests.append("coding") - if self.example_tests: - tests.append("example") + if self.gallery_tests: + tests.append("gallery") if not tests: tests.append("default") print("Running test suite(s): {}".format(", ".join(tests))) @@ -114,19 +114,19 @@ def run(self): tests.append("iris.tests") if self.coding_tests: tests.append("iris.tests.test_coding_standards") - if self.example_tests: + if self.gallery_tests: import iris.config default_doc_path = os.path.join(sys.path[0], "docs", "iris") doc_path = iris.config.get_option( "Resources", "doc_dir", default=default_doc_path ) - example_path = os.path.join(doc_path, "example_tests") - if os.path.exists(example_path): - tests.append(example_path) + gallery_path = os.path.join(doc_path, "gallery_tests") + if os.path.exists(gallery_path): + tests.append(gallery_path) else: print( - "WARNING: Example path %s does not exist." % (example_path) + "WARNING: Gallery path %s does not exist." 
% (gallery_path) ) if not tests: tests.append("iris.tests") @@ -143,10 +143,7 @@ def run(self): regexp_pat, "--process-timeout=180", ] - try: - import gribapi # noqa - except ImportError: - args.append("--exclude=^grib$") + if self.stop: args.append("--stop") diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index 9bb1c4626f..42ddfac161 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -721,12 +721,6 @@ def realistic_4d_w_missing_data(): return cube -def global_grib2(): - path = tests.get_data_path(("GRIB", "global_t", "global.grib2")) - cube = iris.load_cube(path) - return cube - - def ocean_sigma_z(): """ Return a sample cube with an @@ -834,8 +828,8 @@ def jan_offset(day, year): units="days since 1970-01-01 00:00:00-00", climatological=True, ) - lon_dim = DimCoord(lon, standard_name="longitude") - lat_dim = DimCoord(lat, standard_name="latitude") + lon_dim = DimCoord(lon, standard_name="longitude", units="degrees") + lat_dim = DimCoord(lat, standard_name="latitude", units="degrees") data_shape = (len(time_points), len(lat), len(lon)) values = np.zeros(shape=data_shape, dtype=np.int8) diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 207bd700a3..a98a83768a 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -65,8 +65,6 @@ def horiz_cs(): ) filetypes = (".nc", ".pp") - if tests.GRIB_AVAILABLE: - filetypes += (".grib2",) for filetype in filetypes: saved_tmpfile = iris.util.create_temp_filename(suffix=filetype) iris.save(cm, saved_tmpfile) diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index b4e1bad640..bc759f251d 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -89,7 +89,9 @@ def setUp(self): ) model_level = iris.coords.DimCoord( - np.arange(z_points.size), standard_name="model_level_number" + np.arange(z_points.size), + standard_name="model_level_number", + units="1", ) self.cube_single.add_aux_coord(self.coord_z_single, 0) @@ -124,7 +126,9 @@ def setUp(self): ) model_level = iris.coords.DimCoord( - np.arange(z1_points.size), standard_name="model_level_number" + np.arange(z1_points.size), + standard_name="model_level_number", + units="1", ) self.cube_multi.add_aux_coord(self.coord_z1_multi, 0) diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index 94880d6ed1..a559ee0e8a 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -235,7 +235,11 @@ def test_addition_different_attributes(self): b.attributes["my attribute"] = "foobar" c = a + b self.assertIsNone(c.standard_name) - self.assertEqual(c.attributes, {}) + expected = { + "my attribute": "foobar", + "source": "Data from Met Office Unified Model", + } + self.assertEqual(expected, c.attributes) def test_apply_ufunc(self): a = self.cube @@ -344,10 +348,13 @@ def test_ifunc_call_fail(self): my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) - # should fail because giving 2 arguments to an ifunc that expects - # only one - with self.assertRaises(ValueError): - my_ifunc(a, a) + # should now NOT fail because giving 2 arguments to an ifunc that + # expects only one will now ignore the surplus argument and raise + # a logging message instead, and go on to perform the operation. 
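# A self-contained sketch of the "assert it does NOT raise" idiom used in
# this hunk, assuming only the standard library (the class and test names
# here are illustrative, not part of the patch):
import unittest

class TestNoRaise(unittest.TestCase):
    def test_no_value_error(self):
        # assertRaises fails with "ValueError not raised" when its body
        # succeeds, and the outer assertRaisesRegex asserts exactly that.
        with self.assertRaisesRegex(AssertionError, "ValueError not raised"):
            with self.assertRaises(ValueError):
                int("42")  # succeeds, so the inner assertRaises fails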
+ emsg = "ValueError not raised" + with self.assertRaisesRegex(AssertionError, emsg): + with self.assertRaises(ValueError): + my_ifunc(a, a) my_ifunc = iris.analysis.maths.IFunc( np.multiply, lambda a: cf_units.Unit("1") @@ -509,7 +516,11 @@ def test_multiplication_different_attributes(self): b.attributes["my attribute"] = "foobar" c = a * b self.assertIsNone(c.standard_name) - self.assertEqual(c.attributes, {}) + expected = { + "source": "Data from Met Office Unified Model", + "my attribute": "foobar", + } + self.assertEqual(expected, c.attributes) def test_multiplication_in_place(self): a = self.cube.copy() diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index ab27ad6040..bbaae1a8de 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -1022,14 +1022,6 @@ def test_metadata_fail(self): (), ) with self.assertRaises(TypeError): - self.t.metadata = { - "standard_name": "air_pressure", - "long_name": "foo", - "var_name": "bar", - "units": "", - "attributes": {"random": "12"}, - } - with self.assertRaises(TypeError): class Metadata: pass diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index cfb54203b3..00ce7b7d44 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -104,13 +104,12 @@ def test_license_headers(self): "setup.py", "build/*", "dist/*", - "docs/iris/example_code/*/*.py", + "docs/iris/gallery_code/*/*.py", "docs/iris/src/developers_guide/documenting/*.py", - "docs/iris/src/sphinxext/gen_gallery.py", "docs/iris/src/userguide/plotting_examples/*.py", "docs/iris/src/userguide/regridding_plots/*.py", "docs/iris/src/developers_guide/gitwash_dumper.py", - "docs/iris/build/*", + "docs/iris/src/_build/*", "lib/iris/analysis/_scipy_interpolate.py", "lib/iris/fileformats/_pyke_rules/*", ) diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index bbe5f5eba2..d45a884a2f 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -66,52 +66,58 @@ def _make_cube( cube_data = np.empty((y_size, x_size), dtype=np.float32) cube_data[:] = data cube = iris.cube.Cube(cube_data) - coord = DimCoord(y_range, long_name="y") + coord = DimCoord(y_range, long_name="y", units="1") coord.guess_bounds() cube.add_dim_coord(coord, 0) - coord = DimCoord(x_range, long_name="x") + coord = DimCoord(x_range, long_name="x", units="1") coord.guess_bounds() cube.add_dim_coord(coord, 1) if aux is not None: aux = aux.split(",") if "y" in aux: - coord = AuxCoord(y_range * 10, long_name="y-aux") + coord = AuxCoord(y_range * 10, long_name="y-aux", units="1") cube.add_aux_coord(coord, (0,)) if "x" in aux: - coord = AuxCoord(x_range * 10, long_name="x-aux") + coord = AuxCoord(x_range * 10, long_name="x-aux", units="1") cube.add_aux_coord(coord, (1,)) if "xy" in aux: payload = np.arange(y_size * x_size, dtype=np.float32).reshape( y_size, x_size ) - coord = AuxCoord(payload * 100 + offset, long_name="xy-aux") + coord = AuxCoord( + payload * 100 + offset, long_name="xy-aux", units="1" + ) cube.add_aux_coord(coord, (0, 1)) if cell_measure is not None: cell_measure = cell_measure.split(",") if "y" in cell_measure: - cm = CellMeasure(y_range * 10, long_name="y-aux") + cm = CellMeasure(y_range * 10, long_name="y-aux", units="1") cube.add_cell_measure(cm, (0,)) if "x" in cell_measure: - cm = CellMeasure(x_range * 10, long_name="x-aux") + cm = CellMeasure(x_range * 10, long_name="x-aux", units="1") cube.add_cell_measure(cm, (1,)) if "xy" 
in cell_measure: payload = x_range + y_range[:, np.newaxis] - cm = CellMeasure(payload * 100 + offset, long_name="xy-aux") + cm = CellMeasure( + payload * 100 + offset, long_name="xy-aux", units="1" + ) cube.add_cell_measure(cm, (0, 1)) if ancil is not None: ancil = ancil.split(",") if "y" in ancil: - av = AncillaryVariable(y_range * 10, long_name="y-aux") + av = AncillaryVariable(y_range * 10, long_name="y-aux", units="1") cube.add_ancillary_variable(av, (0,)) if "x" in ancil: - av = AncillaryVariable(x_range * 10, long_name="x-aux") + av = AncillaryVariable(x_range * 10, long_name="x-aux", units="1") cube.add_ancillary_variable(av, (1,)) if "xy" in ancil: payload = x_range + y_range[:, np.newaxis] - av = AncillaryVariable(payload * 100 + offset, long_name="xy-aux") + av = AncillaryVariable( + payload * 100 + offset, long_name="xy-aux", units="1" + ) cube.add_ancillary_variable(av, (0, 1)) if scalar is not None: @@ -169,50 +175,56 @@ def _make_cube_3d(x, y, z, data, aux=None, offset=0): cube_data = np.empty((x_size, y_size, z_size), dtype=np.float32) cube_data[:] = data cube = iris.cube.Cube(cube_data) - coord = DimCoord(z_range, long_name="z") + coord = DimCoord(z_range, long_name="z", units="1") coord.guess_bounds() cube.add_dim_coord(coord, 0) - coord = DimCoord(y_range, long_name="y") + coord = DimCoord(y_range, long_name="y", units="1") coord.guess_bounds() cube.add_dim_coord(coord, 1) - coord = DimCoord(x_range, long_name="x") + coord = DimCoord(x_range, long_name="x", units="1") coord.guess_bounds() cube.add_dim_coord(coord, 2) if aux is not None: aux = aux.split(",") if "z" in aux: - coord = AuxCoord(z_range * 10, long_name="z-aux") + coord = AuxCoord(z_range * 10, long_name="z-aux", units="1") cube.add_aux_coord(coord, (0,)) if "y" in aux: - coord = AuxCoord(y_range * 10, long_name="y-aux") + coord = AuxCoord(y_range * 10, long_name="y-aux", units="1") cube.add_aux_coord(coord, (1,)) if "x" in aux: - coord = AuxCoord(x_range * 10, long_name="x-aux") + coord = AuxCoord(x_range * 10, long_name="x-aux", units="1") cube.add_aux_coord(coord, (2,)) if "xy" in aux: payload = np.arange(x_size * y_size, dtype=np.float32).reshape( y_size, x_size ) - coord = AuxCoord(payload + offset, long_name="xy-aux") + coord = AuxCoord(payload + offset, long_name="xy-aux", units="1") cube.add_aux_coord(coord, (1, 2)) if "xz" in aux: payload = np.arange(x_size * z_size, dtype=np.float32).reshape( z_size, x_size ) - coord = AuxCoord(payload * 10 + offset, long_name="xz-aux") + coord = AuxCoord( + payload * 10 + offset, long_name="xz-aux", units="1" + ) cube.add_aux_coord(coord, (0, 2)) if "yz" in aux: payload = np.arange(y_size * z_size, dtype=np.float32).reshape( z_size, y_size ) - coord = AuxCoord(payload * 100 + offset, long_name="yz-aux") + coord = AuxCoord( + payload * 100 + offset, long_name="yz-aux", units="1" + ) cube.add_aux_coord(coord, (0, 1)) if "xyz" in aux: payload = np.arange( x_size * y_size * z_size, dtype=np.float32 ).reshape(z_size, y_size, x_size) - coord = AuxCoord(payload * 1000 + offset, long_name="xyz-aux") + coord = AuxCoord( + payload * 1000 + offset, long_name="xyz-aux", units="1" + ) cube.add_aux_coord(coord, (0, 1, 2)) return cube diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 053b6b509b..c9c686b78b 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -247,7 +247,7 @@ def test_basic(self): "AuxCoord(" "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," " standard_name=None," - " units=Unit('1')," + " 
units=Unit('unknown')," " attributes={'monty': 'python'})" ) self.assertEqual(result, str(b)) @@ -337,7 +337,7 @@ def test_basic(self): "DimCoord(" "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," " standard_name=None," - " units=Unit('1')," + " units=Unit('unknown')," " attributes={'monty': 'python'})" ) self.assertEqual(result, str(b)) @@ -944,11 +944,11 @@ def test_circular(self): r.circular = False self.assertTrue(r.is_compatible(self.dim_coord)) - def test_defn(self): - coord_defn = self.aux_coord._as_defn() - self.assertTrue(self.aux_coord.is_compatible(coord_defn)) - coord_defn = self.dim_coord._as_defn() - self.assertTrue(self.dim_coord.is_compatible(coord_defn)) + def test_metadata(self): + metadata = self.aux_coord.metadata + self.assertTrue(self.aux_coord.is_compatible(metadata)) + metadata = self.dim_coord.metadata + self.assertTrue(self.dim_coord.is_compatible(metadata)) def test_is_ignore(self): r = self.aux_coord.copy() diff --git a/lib/iris/tests/test_hybrid.py b/lib/iris/tests/test_hybrid.py index 28a733f7cc..29bad235c7 100644 --- a/lib/iris/tests/test_hybrid.py +++ b/lib/iris/tests/test_hybrid.py @@ -51,6 +51,11 @@ def test_indexing(self): _ = cube.coord("altitude") self.assertCML(cube, ("derived", "column.cml")) + def test_removing_derived_coord(self): + cube = self.cube + cube.remove_coord("altitude") + self.assertCML(cube, ("derived", "removed_derived_coord.cml")) + def test_removing_sigma(self): # Check the cube remains OK when sigma is removed. cube = self.cube diff --git a/lib/iris/tests/test_iterate.py b/lib/iris/tests/test_iterate.py index 85f5943b8e..e53eede6f4 100644 --- a/lib/iris/tests/test_iterate.py +++ b/lib/iris/tests/test_iterate.py @@ -475,17 +475,24 @@ def test_izip_nd_non_ortho(self): def test_izip_nd_ortho(self): cube1 = iris.cube.Cube(np.zeros((5, 5, 5, 5, 5), dtype="f8")) cube1.add_dim_coord( - iris.coords.DimCoord(np.arange(5, dtype="i8"), long_name="z"), [0] + iris.coords.DimCoord( + np.arange(5, dtype="i8"), long_name="z", units="1" + ), + [0], ) cube1.add_aux_coord( iris.coords.AuxCoord( - np.arange(25, dtype="i8").reshape(5, 5), long_name="y" + np.arange(25, dtype="i8").reshape(5, 5), + long_name="y", + units="1", ), [1, 2], ) cube1.add_aux_coord( iris.coords.AuxCoord( - np.arange(25, dtype="i8").reshape(5, 5), long_name="x" + np.arange(25, dtype="i8").reshape(5, 5), + long_name="x", + units="1", ), [3, 4], ) diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index a550e1ed4b..c69a83edd5 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -16,9 +16,11 @@ import os.path import shutil import stat +from subprocess import check_call import tempfile from unittest import mock +from cf_units import as_unit import netCDF4 as nc import numpy as np import numpy.ma as ma @@ -27,6 +29,7 @@ import iris.analysis.trajectory import iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc as pyke_rules import iris.fileformats.netcdf +from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names import iris.util import iris.coord_systems as icoord_systems @@ -292,6 +295,42 @@ def test_deferred_loading(self): cube[0][(0, 2), (1, 3)], ("netcdf", "netcdf_deferred_mix_1.cml") ) + def test_default_units(self): + # Note: using a CDL string as a test data reference, rather than a binary file. 
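# A standalone sketch of the same CDL round trip used by this test, assuming
# the "ncgen" utility from the netCDF-C tools is on PATH (the helper name and
# file names are hypothetical):
import os
import subprocess
import tempfile

def cdl_to_netcdf(cdl_string):
    # Write the textual CDL into a temporary file, then let ncgen compile
    # it into a binary netCDF file and return that file's path.
    tmpdir = tempfile.mkdtemp()
    cdl_path = os.path.join(tmpdir, "ref.cdl")
    nc_path = os.path.join(tmpdir, "ref.nc")
    with open(cdl_path, "w") as f_out:
        f_out.write(cdl_string)
    subprocess.check_call(["ncgen", "-o", nc_path, cdl_path])
    return nc_path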
+ ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + ayv = 2 ; + variables: + int64 qqv(ayv, axv) ; + qqv:long_name = "qq" ; + int64 ayv(ayv) ; + ayv:long_name = "y" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + data: + axv = 11, 12, 13; + ayv = 21, 22; + } + """ + self.tmpdir = tempfile.mkdtemp() + cdl_path = os.path.join(self.tmpdir, "tst.cdl") + nc_path = os.path.join(self.tmpdir, "tst.nc") + # Write CDL string into a temporary CDL file. + with open(cdl_path, "w") as f_out: + f_out.write(ref_cdl) + # Use ncgen to convert this into an actual (temporary) netCDF file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + check_call(command, shell=True) + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. + cubes = list(nc_load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + self.assertEqual(cubes[0].units, as_unit("unknown")) + self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) + self.assertEqual(cubes[0].coord("x").units, as_unit(1)) + def test_units(self): # Test exercising graceful cube and coordinate units loading. cube0, cube1 = sorted( @@ -608,10 +647,10 @@ def test_netcdf_multi_with_coords(self): def test_netcdf_multi_wtih_samedimcoord(self): time1 = iris.coords.DimCoord( - np.arange(10), standard_name="time", var_name="time" + np.arange(10), standard_name="time", var_name="time", units="1" ) time2 = iris.coords.DimCoord( - np.arange(20), standard_name="time", var_name="time" + np.arange(20), standard_name="time", var_name="time", units="1" ) self.cube4.add_dim_coord(time1, 0) @@ -630,11 +669,13 @@ def test_netcdf_multi_wtih_samedimcoord(self): def test_netcdf_multi_conflict_name_dup_coord(self): # Duplicate coordinates with modified variable names lookup. latitude1 = iris.coords.DimCoord( - np.arange(10), standard_name="latitude" + np.arange(10), standard_name="latitude", units="1" + ) + time2 = iris.coords.DimCoord( + np.arange(2), standard_name="time", units="1" ) - time2 = iris.coords.DimCoord(np.arange(2), standard_name="time") latitude2 = iris.coords.DimCoord( - np.arange(2), standard_name="latitude" + np.arange(2), standard_name="latitude", units="1" ) self.cube6.add_dim_coord(latitude1, 0) @@ -711,10 +752,10 @@ def test_netcdf_save_conflicting_aux(self): # Test saving CF-netCDF with multi-dimensional auxiliary coordinates, # with conflicts. self.cube4.add_aux_coord( - iris.coords.AuxCoord(np.arange(10), "time"), 0 + iris.coords.AuxCoord(np.arange(10), "time", units="1"), 0 ) self.cube6.add_aux_coord( - iris.coords.AuxCoord(np.arange(10, 20), "time"), 0 + iris.coords.AuxCoord(np.arange(10, 20), "time", units="1"), 0 ) cubes = iris.cube.CubeList([self.cube4, self.cube6]) @@ -811,9 +852,11 @@ def test_netcdf_save_conflicting_names(self): # Test saving CF-netCDF with a dimension name corresponding to # an existing variable name (conflict). 
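# The recurring pattern in this patch: test coordinates now declare an
# explicit unit, because a coordinate built without one defaults to
# "unknown" rather than the old default of "1". A minimal sketch, assuming
# iris and numpy are installed:
import numpy as np
from iris.coords import DimCoord

level = DimCoord(np.arange(5), standard_name="model_level_number", units="1")
latitude = DimCoord(np.arange(4), standard_name="latitude", units="degrees")
print(DimCoord(np.arange(3), long_name="x").units)  # now Unit('unknown')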
self.cube4.add_dim_coord( - iris.coords.DimCoord(np.arange(10), "time"), 0 + iris.coords.DimCoord(np.arange(10), "time", units="1"), 0 + ) + self.cube6.add_aux_coord( + iris.coords.AuxCoord(1, "time", units="1"), None ) - self.cube6.add_aux_coord(iris.coords.AuxCoord(1, "time"), None) cubes = iris.cube.CubeList([self.cube4, self.cube6]) with self.temp_filename(suffix=".nc") as file_out: diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 04418d8d40..600801312f 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -875,6 +875,7 @@ def test_non_cube_coordinate(self): pts, standard_name="model_level_number", attributes={"positive": "up"}, + units="1", ) self.draw("contourf", cube, coords=["grid_latitude", x]) diff --git a/lib/iris/tests/test_whatsnew_contributions.py b/lib/iris/tests/test_whatsnew_contributions.py deleted file mode 100644 index 42e816acba..0000000000 --- a/lib/iris/tests/test_whatsnew_contributions.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Check the the latest "whatsnew" contributions files have usable names. - -The files in "./docs...whatsnew/contributions_/" should have filenames -with a particular structure, which encodes a summary name. -These names are interpreted by "./docs...whatsnew/aggregate_directory.py". -This test just ensures that all those files have names which that process -can accept. - -.. note: - This only works within a developer installation: In a 'normal' install the - location of the docs sources is not known. - In a Travis installation, this test silently passes and the .travis.yml - invokes the checking command directly. - -""" - -# import iris tests first. -import iris.tests as tests - -import os -import os.path -import subprocess -import sys - -import iris - - -class TestWhatsnewContribs(tests.IrisTest): - def test_check_contributions(self): - # Get dirpath of overall iris installation. - # Note: assume iris at "/lib/iris". - iris_module_dirpath = os.path.dirname(iris.__file__) - iris_dirs = iris_module_dirpath.split(os.sep) - install_dirpath = os.sep.join(iris_dirs[:-2]) - - # Construct path to docs 'whatsnew' directory. - # Note: assume docs at "/docs". - whatsnew_dirpath = os.path.join( - install_dirpath, "docs", "iris", "src", "whatsnew" - ) - - # Quietly ignore if the directory does not exist: It is only there in - # in a developer installation, not a normal install. - # Travis bypasses this problem by running the test directly. - if os.path.exists(whatsnew_dirpath): - # Run a 'check contributions' command in that directory. 
- cmd = [ - sys.executable, - "aggregate_directory.py", - "--checkonly", - "--quiet", - ] - subprocess.check_call(cmd, cwd=whatsnew_dirpath) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index d1eaf71030..fa4ca8b608 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -34,9 +34,15 @@ class ThreeDimCube(tests.IrisTest): def setUp(self): cube = stock.simple_3d_w_multidim_coords() - cube.add_aux_coord(iris.coords.DimCoord(np.arange(2), "height"), 0) - cube.add_dim_coord(iris.coords.DimCoord(np.arange(3), "latitude"), 1) - cube.add_dim_coord(iris.coords.DimCoord(np.arange(4), "longitude"), 2) + cube.add_aux_coord( + iris.coords.DimCoord(np.arange(2), "height", units="1"), 0 + ) + cube.add_dim_coord( + iris.coords.DimCoord(np.arange(3), "latitude", units="1"), 1 + ) + cube.add_dim_coord( + iris.coords.DimCoord(np.arange(4), "longitude", units="1"), 2 + ) self.data = np.arange(24).reshape(2, 3, 4).astype(np.float32) cube.data = self.data self.cube = cube diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index 55fb2f4829..492283f843 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -1253,7 +1253,7 @@ def setUp(self): units="m", attributes={"positive": "up"}, ) - sigma = AuxCoord(1, long_name="sigma") + sigma = AuxCoord(1, long_name="sigma", units="1") surface_altitude = AuxCoord( (src.data - src.data.min()) * 50, "surface_altitude", units="m" ) diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py index 2b2524795c..cffac86291 100644 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py @@ -71,7 +71,7 @@ def test_simple_single_point(self): self.assertIs(actual.data, data) name = "percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) def test_simple_multiple_points(self): @@ -89,7 +89,7 @@ def test_simple_multiple_points(self): self.assertArrayEqual(actual.data, expected) name = "percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) def test_multi_single_point(self): @@ -105,7 +105,7 @@ def test_multi_single_point(self): self.assertIs(actual.data, data) name = "percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) def test_multi_multiple_points(self): @@ -123,7 +123,7 @@ def test_multi_multiple_points(self): self.assertArrayEqual(actual.data, expected) name = "percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) diff --git a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py 
b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py index 1c59ded1fc..878708e48a 100644 --- a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py @@ -82,7 +82,7 @@ def test_simple_single_point(self): self.assertIs(actual.data, data) name = "weighted_percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) def test_simple_multiple_points(self): @@ -107,7 +107,7 @@ def test_simple_multiple_points(self): self.assertIs(actual[1], total_weights) name = "weighted_percentile_over_time" coord = actual[0].coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) def test_multi_single_point(self): @@ -123,7 +123,7 @@ def test_multi_single_point(self): self.assertIs(actual.data, data) name = "weighted_percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) def test_multi_multiple_points(self): @@ -141,7 +141,7 @@ def test_multi_multiple_points(self): self.assertArrayEqual(actual.data, expected) name = "weighted_percentile_over_time" coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name) + expected = AuxCoord(percent, long_name=name, units="percent") self.assertEqual(coord, expected) diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index 789b1d61d5..14944891f2 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -144,7 +144,9 @@ def setUp(self): self.delta = iris.coords.DimCoord( [0.0, 1.0, 2.0], long_name="level_pressure", units="Pa" ) - self.sigma = iris.coords.DimCoord([1.0, 0.9, 0.8], long_name="sigma") + self.sigma = iris.coords.DimCoord( + [1.0, 0.9, 0.8], long_name="sigma", units="1" + ) self.surface_air_pressure = iris.coords.AuxCoord( np.arange(4).reshape(2, 2), "surface_air_pressure", units="Pa" ) diff --git a/lib/iris/tests/integration/format_interop/__init__.py b/lib/iris/tests/unit/common/__init__.py similarity index 78% rename from lib/iris/tests/integration/format_interop/__init__.py rename to lib/iris/tests/unit/common/__init__.py index b9024f2f39..5380785042 100644 --- a/lib/iris/tests/integration/format_interop/__init__.py +++ b/lib/iris/tests/unit/common/__init__.py @@ -3,4 +3,4 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Integration tests for format interoperability.""" +"""Unit tests for the :mod:`iris.common` module.""" diff --git a/lib/iris/tests/unit/cube_coord_common/__init__.py b/lib/iris/tests/unit/common/lenient/__init__.py similarity index 75% rename from lib/iris/tests/unit/cube_coord_common/__init__.py rename to lib/iris/tests/unit/common/lenient/__init__.py index 4390f95921..2a99e7a4c2 100644 --- a/lib/iris/tests/unit/cube_coord_common/__init__.py +++ b/lib/iris/tests/unit/common/lenient/__init__.py @@ -3,4 +3,4 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
-"""Unit tests for the :mod:`iris._cube_coord_common` module.""" +"""Unit tests for the :mod:`iris.common.lenient` package.""" diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py new file mode 100644 index 0000000000..8ca98342ca --- /dev/null +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -0,0 +1,182 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.lenient.Lenient`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from unittest.mock import sentinel + +from iris.common.lenient import Lenient, _LENIENT + + +class Test___init__(tests.IrisTest): + def test_default(self): + lenient = Lenient() + expected = dict(maths=True) + self.assertEqual(expected, lenient.__dict__) + + def test_kwargs(self): + lenient = Lenient(maths=False) + expected = dict(maths=False) + self.assertEqual(expected, lenient.__dict__) + + def test_kwargs_invalid(self): + emsg = "Invalid .* option, got 'merge'." + with self.assertRaisesRegex(KeyError, emsg): + _ = Lenient(merge=True) + + +class Test___contains__(tests.IrisTest): + def setUp(self): + self.lenient = Lenient() + + def test_in(self): + self.assertTrue("maths", self.lenient) + + def test_not_in(self): + self.assertTrue(("concatenate", self.lenient)) + + +class Test___getitem__(tests.IrisTest): + def setUp(self): + self.lenient = Lenient() + + def test_in(self): + self.assertTrue(self.lenient["maths"]) + + def test_not_in(self): + emsg = "Invalid .* option, got 'MATHS'." + with self.assertRaisesRegex(KeyError, emsg): + _ = self.lenient["MATHS"] + + +class Test___repr__(tests.IrisTest): + def setUp(self): + self.lenient = Lenient() + + def test(self): + expected = "Lenient(maths=True)" + self.assertEqual(expected, repr(self.lenient)) + + +class Test___setitem__(tests.IrisTest): + def setUp(self): + self.lenient = Lenient() + + def test_key_invalid(self): + emsg = "Invalid .* option, got 'MATHS." + with self.assertRaisesRegex(KeyError, emsg): + self.lenient["MATHS"] = False + + def test_maths_value_invalid(self): + value = sentinel.value + emsg = f"Invalid .* option 'maths' value, got {value!r}." 
+ with self.assertRaisesRegex(ValueError, emsg): + self.lenient["maths"] = value + + def test_maths_disable__lenient_enable_true(self): + self.assertTrue(_LENIENT.enable) + self.lenient["maths"] = False + self.assertFalse(self.lenient.__dict__["maths"]) + self.assertFalse(_LENIENT.enable) + + def test_maths_disable__lenient_enable_false(self): + _LENIENT.__dict__["enable"] = False + self.assertFalse(_LENIENT.enable) + self.lenient["maths"] = False + self.assertFalse(self.lenient.__dict__["maths"]) + self.assertFalse(_LENIENT.enable) + + def test_maths_enable__lenient_enable_true(self): + self.assertTrue(_LENIENT.enable) + self.lenient["maths"] = True + self.assertTrue(self.lenient.__dict__["maths"]) + self.assertTrue(_LENIENT.enable) + + def test_maths_enable__lenient_enable_false(self): + _LENIENT.__dict__["enable"] = False + self.assertFalse(_LENIENT.enable) + self.lenient["maths"] = True + self.assertTrue(self.lenient.__dict__["maths"]) + self.assertTrue(_LENIENT.enable) + + +class Test_context(tests.IrisTest): + def setUp(self): + self.lenient = Lenient() + + def test_nop(self): + self.assertTrue(self.lenient["maths"]) + + with self.lenient.context(): + self.assertTrue(self.lenient["maths"]) + + self.assertTrue(self.lenient["maths"]) + + def test_maths_disable__lenient_true(self): + # synchronised + self.assertTrue(_LENIENT.enable) + self.assertTrue(self.lenient["maths"]) + + with self.lenient.context(maths=False): + # still synchronised + self.assertFalse(_LENIENT.enable) + self.assertFalse(self.lenient["maths"]) + + # still synchronised + self.assertTrue(_LENIENT.enable) + self.assertTrue(self.lenient["maths"]) + + def test_maths_disable__lenient_false(self): + # not synchronised + _LENIENT.__dict__["enable"] = False + self.assertFalse(_LENIENT.enable) + self.assertTrue(self.lenient["maths"]) + + with self.lenient.context(maths=False): + # now synchronised + self.assertFalse(_LENIENT.enable) + self.assertFalse(self.lenient["maths"]) + + # still synchronised + self.assertTrue(_LENIENT.enable) + self.assertTrue(self.lenient["maths"]) + + def test_maths_enable__lenient_true(self): + # not synchronised + self.assertTrue(_LENIENT.enable) + self.lenient.__dict__["maths"] = False + self.assertFalse(self.lenient["maths"]) + + with self.lenient.context(maths=True): + # now synchronised + self.assertTrue(_LENIENT.enable) + self.assertTrue(self.lenient["maths"]) + + # still synchronised + self.assertFalse(_LENIENT.enable) + self.assertFalse(self.lenient["maths"]) + + def test_maths_enable__lenient_false(self): + # synchronised + _LENIENT.__dict__["enable"] = False + self.assertFalse(_LENIENT.enable) + self.lenient.__dict__["maths"] = False + self.assertFalse(self.lenient["maths"]) + + with self.lenient.context(maths=True): + # still synchronised + self.assertTrue(_LENIENT.enable) + self.assertTrue(self.lenient["maths"]) + + # still synchronised + self.assertFalse(_LENIENT.enable) + self.assertFalse(self.lenient["maths"]) diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py new file mode 100644 index 0000000000..d6bc2882d6 --- /dev/null +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -0,0 +1,835 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.lenient._Lenient`. 
+ +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from collections.abc import Iterable + +from iris.common.lenient import ( + _LENIENT_ENABLE_DEFAULT, + _LENIENT_PROTECTED, + _Lenient, + _qualname, +) + + +class Test___init__(tests.IrisTest): + def setUp(self): + self.expected = dict(active=None, enable=_LENIENT_ENABLE_DEFAULT) + + def test_default(self): + lenient = _Lenient() + self.assertEqual(self.expected, lenient.__dict__) + + def test_args_service_str(self): + service = "service1" + lenient = _Lenient(service) + self.expected.update(dict(service1=True)) + self.assertEqual(self.expected, lenient.__dict__) + + def test_args_services_str(self): + services = ("service1", "service2") + lenient = _Lenient(*services) + self.expected.update(dict(service1=True, service2=True)) + self.assertEqual(self.expected, lenient.__dict__) + + def test_args_services_callable(self): + def service1(): + pass + + def service2(): + pass + + services = (service1, service2) + lenient = _Lenient(*services) + self.expected.update( + {_qualname(service1): True, _qualname(service2): True,} + ) + self.assertEqual(self.expected, lenient.__dict__) + + def test_kwargs_client_str(self): + client = dict(client1="service1") + lenient = _Lenient(**client) + self.expected.update(dict(client1=("service1",))) + self.assertEqual(self.expected, lenient.__dict__) + + def test_kwargs_clients_str(self): + clients = dict(client1="service1", client2="service2") + lenient = _Lenient(**clients) + self.expected.update( + dict(client1=("service1",), client2=("service2",)) + ) + self.assertEqual(self.expected, lenient.__dict__) + + def test_kwargs_clients_callable(self): + def client1(): + pass + + def client2(): + pass + + def service1(): + pass + + def service2(): + pass + + qualname_client1 = _qualname(client1) + qualname_client2 = _qualname(client2) + clients = { + qualname_client1: service1, + qualname_client2: (service1, service2), + } + lenient = _Lenient(**clients) + self.expected.update( + { + _qualname(client1): (_qualname(service1),), + _qualname(client2): (_qualname(service1), _qualname(service2)), + } + ) + self.assertEqual(self.expected, lenient.__dict__) + + +class Test___call__(tests.IrisTest): + def setUp(self): + self.client = "myclient" + self.lenient = _Lenient() + + def test_missing_service_str(self): + self.assertFalse(self.lenient("myservice")) + + def test_missing_service_callable(self): + def myservice(): + pass + + self.assertFalse(self.lenient(myservice)) + + def test_disabled_service_str(self): + service = "myservice" + self.lenient.__dict__[service] = False + self.assertFalse(self.lenient(service)) + + def test_disable_service_callable(self): + def myservice(): + pass + + qualname_service = _qualname(myservice) + self.lenient.__dict__[qualname_service] = False + self.assertFalse(self.lenient(myservice)) + + def test_service_str_with_no_active_client(self): + service = "myservice" + self.lenient.__dict__[service] = True + self.assertFalse(self.lenient(service)) + + def test_service_callable_with_no_active_client(self): + def myservice(): + pass + + qualname_service = _qualname(myservice) + self.lenient.__dict__[qualname_service] = True + self.assertFalse(self.lenient(myservice)) + + def test_service_str_with_active_client_with_no_registered_services(self): + service = "myservice" + self.lenient.__dict__[service] = True + self.lenient.__dict__["active"] = self.client + self.assertFalse(self.lenient(service)) + + def 
test_service_callable_with_active_client_with_no_registered_services( + self, + ): + def myservice(): + pass + + def myclient(): + pass + + qualname_service = _qualname(myservice) + self.lenient.__dict__[qualname_service] = True + self.lenient.__dict__["active"] = _qualname(myclient) + self.assertFalse(self.lenient(myservice)) + + def test_service_str_with_active_client_with_unmatched_registered_services( + self, + ): + service = "myservice" + self.lenient.__dict__[service] = True + self.lenient.__dict__["active"] = self.client + self.lenient.__dict__[self.client] = ("service1", "service2") + self.assertFalse(self.lenient(service)) + + def test_service_callable_with_active_client_with_unmatched_registered_services( + self, + ): + def myservice(): + pass + + def myclient(): + pass + + qualname_service = _qualname(myservice) + qualname_client = _qualname(myclient) + self.lenient.__dict__[qualname_service] = True + self.lenient.__dict__["active"] = qualname_client + self.lenient.__dict__[qualname_client] = ("service1", "service2") + self.assertFalse(self.lenient(myservice)) + + def test_service_str_with_active_client_with_registered_services(self): + service = "myservice" + self.lenient.__dict__[service] = True + self.lenient.__dict__["active"] = self.client + self.lenient.__dict__[self.client] = ("service1", "service2", service) + self.assertTrue(self.lenient(service)) + + def test_service_callable_with_active_client_with_registered_services( + self, + ): + def myservice(): + pass + + def myclient(): + pass + + qualname_service = _qualname(myservice) + qualname_client = _qualname(myclient) + self.lenient.__dict__[qualname_service] = True + self.lenient.__dict__["active"] = qualname_client + self.lenient.__dict__[qualname_client] = ( + "service1", + "service2", + qualname_service, + ) + self.assertTrue(self.lenient(myservice)) + + def test_service_str_with_active_client_with_unmatched_registered_service_str( + self, + ): + service = "myservice" + self.lenient.__dict__[service] = True + self.lenient.__dict__["active"] = self.client + self.lenient.__dict__[self.client] = "serviceXXX" + self.assertFalse(self.lenient(service)) + + def test_service_callable_with_active_client_with_unmatched_registered_service_str( + self, + ): + def myservice(): + pass + + def myclient(): + pass + + qualname_service = _qualname(myservice) + qualname_client = _qualname(myclient) + self.lenient.__dict__[qualname_service] = True + self.lenient.__dict__["active"] = qualname_client + self.lenient.__dict__[qualname_client] = f"{qualname_service}XXX" + self.assertFalse(self.lenient(myservice)) + + def test_service_str_with_active_client_with_registered_service_str(self): + service = "myservice" + self.lenient.__dict__[service] = True + self.lenient.__dict__["active"] = self.client + self.lenient.__dict__[self.client] = service + self.assertTrue(self.lenient(service)) + + def test_service_callable_with_active_client_with_registered_service_str( + self, + ): + def myservice(): + pass + + def myclient(): + pass + + qualname_service = _qualname(myservice) + qualname_client = _qualname(myclient) + self.lenient.__dict__[qualname_service] = True + self.lenient.__dict__["active"] = qualname_client + self.lenient.__dict__[qualname_client] = qualname_service + self.assertTrue(self.lenient(myservice)) + + def test_enable(self): + service = "myservice" + self.lenient.__dict__[service] = True + self.lenient.__dict__["active"] = self.client + self.lenient.__dict__[self.client] = service + self.assertTrue(self.lenient(service)) + 
self.lenient.__dict__["enable"] = False + self.assertFalse(self.lenient(service)) + + +class Test___contains__(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_in(self): + self.assertIn("active", self.lenient) + + def test_not_in(self): + self.assertNotIn("ACTIVATE", self.lenient) + + def test_in_qualname(self): + def func(): + pass + + qualname_func = _qualname(func) + lenient = _Lenient() + lenient.__dict__[qualname_func] = None + self.assertIn(func, lenient) + self.assertIn(qualname_func, lenient) + + +class Test___getattr__(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_in(self): + self.assertIsNone(self.lenient.active) + + def test_not_in(self): + emsg = "Invalid .* option, got 'wibble'." + with self.assertRaisesRegex(AttributeError, emsg): + _ = self.lenient.wibble + + +class Test__getitem__(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_in(self): + self.assertIsNone(self.lenient["active"]) + + def test_in_callable(self): + def service(): + pass + + qualname_service = _qualname(service) + self.lenient.__dict__[qualname_service] = True + self.assertTrue(self.lenient[service]) + + def test_not_in(self): + emsg = "Invalid .* option, got 'wibble'." + with self.assertRaisesRegex(KeyError, emsg): + _ = self.lenient["wibble"] + + def test_not_in_callable(self): + def service(): + pass + + qualname_service = _qualname(service) + emsg = f"Invalid .* option, got '{qualname_service}'." + with self.assertRaisesRegex(KeyError, emsg): + _ = self.lenient[service] + + +class Test___setitem__(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_not_in(self): + emsg = "Invalid .* option, got 'wibble'." + with self.assertRaisesRegex(KeyError, emsg): + self.lenient["wibble"] = None + + def test_in_value_str(self): + client = "client" + service = "service" + self.lenient.__dict__[client] = None + self.lenient[client] = service + self.assertEqual(self.lenient.__dict__[client], (service,)) + + def test_callable_in_value_str(self): + def client(): + pass + + service = "service" + qualname_client = _qualname(client) + self.lenient.__dict__[qualname_client] = None + self.lenient[client] = service + self.assertEqual(self.lenient.__dict__[qualname_client], (service,)) + + def test_in_value_callable(self): + def service(): + pass + + client = "client" + qualname_service = _qualname(service) + self.lenient.__dict__[client] = None + self.lenient[client] = service + self.assertEqual(self.lenient.__dict__[client], (qualname_service,)) + + def test_callable_in_value_callable(self): + def client(): + pass + + def service(): + pass + + qualname_client = _qualname(client) + qualname_service = _qualname(service) + self.lenient.__dict__[qualname_client] = None + self.lenient[client] = service + self.assertEqual( + self.lenient.__dict__[qualname_client], (qualname_service,) + ) + + def test_in_value_bool(self): + client = "client" + self.lenient.__dict__[client] = None + self.lenient[client] = True + self.assertTrue(self.lenient.__dict__[client]) + self.assertFalse(isinstance(self.lenient.__dict__[client], Iterable)) + + def test_callable_in_value_bool(self): + def client(): + pass + + qualname_client = _qualname(client) + self.lenient.__dict__[qualname_client] = None + self.lenient[client] = True + self.assertTrue(self.lenient.__dict__[qualname_client]) + self.assertFalse( + isinstance(self.lenient.__dict__[qualname_client], Iterable) + ) + + def test_in_value_iterable(self): + client = "client" + services = 
("service1", "service2") + self.lenient.__dict__[client] = None + self.lenient[client] = services + self.assertEqual(self.lenient.__dict__[client], services) + + def test_callable_in_value_iterable(self): + def client(): + pass + + qualname_client = _qualname(client) + services = ("service1", "service2") + self.lenient.__dict__[qualname_client] = None + self.lenient[client] = services + self.assertEqual(self.lenient.__dict__[qualname_client], services) + + def test_in_value_iterable_callable(self): + def service1(): + pass + + def service2(): + pass + + client = "client" + self.lenient.__dict__[client] = None + qualname_services = (_qualname(service1), _qualname(service2)) + self.lenient[client] = (service1, service2) + self.assertEqual(self.lenient.__dict__[client], qualname_services) + + def test_callable_in_value_iterable_callable(self): + def client(): + pass + + def service1(): + pass + + def service2(): + pass + + qualname_client = _qualname(client) + self.lenient.__dict__[qualname_client] = None + qualname_services = (_qualname(service1), _qualname(service2)) + self.lenient[client] = (service1, service2) + self.assertEqual( + self.lenient.__dict__[qualname_client], qualname_services + ) + + def test_active_iterable(self): + active = "active" + self.assertIsNone(self.lenient.__dict__[active]) + emsg = "Invalid .* option 'active'" + with self.assertRaisesRegex(ValueError, emsg): + self.lenient[active] = (None,) + + def test_active_str(self): + active = "active" + client = "client1" + self.assertIsNone(self.lenient.__dict__[active]) + self.lenient[active] = client + self.assertEqual(self.lenient.__dict__[active], client) + + def test_active_callable(self): + def client(): + pass + + active = "active" + qualname_client = _qualname(client) + self.assertIsNone(self.lenient.__dict__[active]) + self.lenient[active] = client + self.assertEqual(self.lenient.__dict__[active], qualname_client) + + def test_enable(self): + enable = "enable" + self.assertEqual( + self.lenient.__dict__[enable], _LENIENT_ENABLE_DEFAULT + ) + self.lenient[enable] = True + self.assertTrue(self.lenient.__dict__[enable]) + self.lenient[enable] = False + self.assertFalse(self.lenient.__dict__[enable]) + + def test_enable_invalid(self): + emsg = "Invalid .* option 'enable'" + with self.assertRaisesRegex(ValueError, emsg): + self.lenient["enable"] = None + + +class Test_context(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + self.default = dict(active=None, enable=_LENIENT_ENABLE_DEFAULT) + + def copy(self): + return self.lenient.__dict__.copy() + + def test_nop(self): + pre = self.copy() + with self.lenient.context(): + context = self.copy() + post = self.copy() + self.assertEqual(pre, self.default) + self.assertEqual(context, self.default) + self.assertEqual(post, self.default) + + def test_active_str(self): + client = "client" + pre = self.copy() + with self.lenient.context(active=client): + context = self.copy() + post = self.copy() + self.assertEqual(pre, self.default) + expected = self.default.copy() + expected.update(dict(active=client)) + self.assertEqual(context, expected) + self.assertEqual(post, self.default) + + def test_active_callable(self): + def client(): + pass + + pre = self.copy() + with self.lenient.context(active=client): + context = self.copy() + post = self.copy() + qualname_client = _qualname(client) + self.assertEqual(pre, self.default) + expected = self.default.copy() + expected.update(dict(active=qualname_client)) + self.assertEqual(context, expected) + self.assertEqual(post, 
self.default) + + def test_kwargs(self): + client = "client" + self.lenient.__dict__["service1"] = False + self.lenient.__dict__["service2"] = False + pre = self.copy() + with self.lenient.context(active=client, service1=True, service2=True): + context = self.copy() + post = self.copy() + self.default.update(dict(service1=False, service2=False)) + self.assertEqual(pre, self.default) + expected = self.default.copy() + expected.update(dict(active=client, service1=True, service2=True)) + self.assertEqual(context, expected) + self.assertEqual(post, self.default) + + def test_args_str(self): + client = "client" + services = ("service1", "service2") + pre = self.copy() + with self.lenient.context(*services, active=client): + context = self.copy() + post = self.copy() + self.assertEqual(pre, self.default) + expected = self.default.copy() + expected.update(dict(active=client, client=services)) + self.assertEqual(context["active"], expected["active"]) + self.assertEqual(set(context["client"]), set(expected["client"])) + self.assertEqual(post, self.default) + + def test_args_callable(self): + def service1(): + pass + + def service2(): + pass + + client = "client" + services = (service1, service2) + pre = self.copy() + with self.lenient.context(*services, active=client): + context = self.copy() + post = self.copy() + qualname_services = tuple([_qualname(service) for service in services]) + self.assertEqual(pre, self.default) + expected = self.default.copy() + expected.update(dict(active=client, client=qualname_services)) + self.assertEqual(context["active"], expected["active"]) + self.assertEqual(set(context["client"]), set(expected["client"])) + self.assertEqual(post, self.default) + + def test_context_runtime(self): + services = ("service1", "service2") + pre = self.copy() + with self.lenient.context(*services): + context = self.copy() + post = self.copy() + self.assertEqual(pre, self.default) + expected = self.default.copy() + expected.update(dict(active="__context", __context=services)) + self.assertEqual(context, expected) + self.assertEqual(post, self.default) + + +class Test_enable(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_getter(self): + self.assertEqual(self.lenient.enable, _LENIENT_ENABLE_DEFAULT) + + def test_setter_invalid(self): + emsg = "Invalid .* option 'enable'" + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.enable = 0 + + def test_setter(self): + self.assertEqual(self.lenient.enable, _LENIENT_ENABLE_DEFAULT) + self.lenient.enable = False + self.assertFalse(self.lenient.enable) + + +class Test_register_client(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_not_protected(self): + emsg = "Cannot register .* client" + for protected in _LENIENT_PROTECTED: + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.register_client(protected, "service") + + def test_str_service_str(self): + client = "client" + services = "service" + self.lenient.register_client(client, services) + self.assertIn(client, self.lenient.__dict__) + self.assertEqual(self.lenient.__dict__[client], (services,)) + + def test_str_services_str(self): + client = "client" + services = ("service1", "service2") + self.lenient.register_client(client, services) + self.assertIn(client, self.lenient.__dict__) + self.assertEqual(self.lenient.__dict__[client], services) + + def test_callable_service_callable(self): + def client(): + pass + + def service(): + pass + + qualname_client = _qualname(client) + qualname_service = _qualname(service) + 
self.lenient.register_client(client, service) + self.assertIn(qualname_client, self.lenient.__dict__) + self.assertEqual( + self.lenient.__dict__[qualname_client], (qualname_service,) + ) + + def test_callable_services_callable(self): + def client(): + pass + + def service1(): + pass + + def service2(): + pass + + qualname_client = _qualname(client) + qualname_services = (_qualname(service1), _qualname(service2)) + self.lenient.register_client(client, (service1, service2)) + self.assertIn(qualname_client, self.lenient.__dict__) + self.assertEqual( + self.lenient.__dict__[qualname_client], qualname_services + ) + + def test_services_empty(self): + emsg = "Require at least one .* client service." + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.register_client("client", ()) + + def test_services_overwrite(self): + client = "client" + services = ("service1", "service2") + self.lenient.__dict__[client] = services + self.assertEqual(self.lenient[client], services) + new_services = ("service3", "service4") + self.lenient.register_client(client, services=new_services) + self.assertEqual(self.lenient[client], new_services) + + def test_services_append(self): + client = "client" + services = ("service1", "service2") + self.lenient.__dict__[client] = services + self.assertEqual(self.lenient[client], services) + new_services = ("service3", "service4") + self.lenient.register_client( + client, services=new_services, append=True + ) + expected = set(services + new_services) + self.assertEqual(set(self.lenient[client]), expected) + + +class Test_register_service(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_str(self): + service = "service" + self.assertNotIn(service, self.lenient.__dict__) + self.lenient.register_service(service) + self.assertIn(service, self.lenient.__dict__) + self.assertFalse(isinstance(self.lenient.__dict__[service], Iterable)) + self.assertTrue(self.lenient.__dict__[service]) + + def test_callable(self): + def service(): + pass + + qualname_service = _qualname(service) + self.assertNotIn(qualname_service, self.lenient.__dict__) + self.lenient.register_service(service) + self.assertIn(qualname_service, self.lenient.__dict__) + self.assertFalse( + isinstance(self.lenient.__dict__[qualname_service], Iterable) + ) + self.assertTrue(self.lenient.__dict__[qualname_service]) + + def test_not_protected(self): + emsg = "Cannot register .* service" + for protected in _LENIENT_PROTECTED: + self.lenient.__dict__[protected] = None + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.register_service("active") + + +class Test_unregister_client(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_not_protected(self): + emsg = "Cannot unregister .* client, as .* is a protected .* option." + for protected in _LENIENT_PROTECTED: + self.lenient.__dict__[protected] = None + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_client(protected) + + def test_not_in(self): + emsg = "Cannot unregister unknown .* client" + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_client("client") + + def test_not_client(self): + client = "client" + self.lenient.__dict__[client] = True + emsg = "Cannot unregister .* client, as .* is not a valid .* client." 
+ with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_client(client) + + def test_not_client_callable(self): + def client(): + pass + + qualname_client = _qualname(client) + self.lenient.__dict__[qualname_client] = True + emsg = "Cannot unregister .* client, as .* is not a valid .* client." + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_client(client) + + def test_str(self): + client = "client" + self.lenient.__dict__[client] = (None,) + self.lenient.unregister_client(client) + self.assertNotIn(client, self.lenient.__dict__) + + def test_callable(self): + def client(): + pass + + qualname_client = _qualname(client) + self.lenient.__dict__[qualname_client] = (None,) + self.lenient.unregister_client(client) + self.assertNotIn(qualname_client, self.lenient.__dict__) + + +class Test_unregister_service(tests.IrisTest): + def setUp(self): + self.lenient = _Lenient() + + def test_not_protected(self): + emsg = "Cannot unregister .* service, as .* is a protected .* option." + for protected in _LENIENT_PROTECTED: + self.lenient.__dict__[protected] = None + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_service(protected) + + def test_not_in(self): + emsg = "Cannot unregister unknown .* service" + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_service("service") + + def test_not_service(self): + service = "service" + self.lenient.__dict__[service] = (None,) + emsg = "Cannot unregister .* service, as .* is not a valid .* service." + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_service(service) + + def test_not_service_callable(self): + def service(): + pass + + qualname_service = _qualname(service) + self.lenient.__dict__[qualname_service] = (None,) + emsg = "Cannot unregister .* service, as .* is not a valid .* service." + with self.assertRaisesRegex(ValueError, emsg): + self.lenient.unregister_service(service) + + def test_str(self): + service = "service" + self.lenient.__dict__[service] = True + self.lenient.unregister_service(service) + self.assertNotIn(service, self.lenient.__dict__) + + def test_callable(self): + def service(): + pass + + qualname_service = _qualname(service) + self.lenient.__dict__[qualname_service] = True + self.lenient.unregister_service(service) + self.assertNotIn(qualname_service, self.lenient.__dict__) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py new file mode 100644 index 0000000000..29cf5e7f82 --- /dev/null +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -0,0 +1,182 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :func:`iris.common.lenient._lenient_client`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
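+#
+# For orientation, a minimal sketch of the decorator under test (the
+# service name is hypothetical; this mirrors the behaviour asserted in
+# the tests below rather than documenting the private API):
+#
+#     from iris.common.lenient import _LENIENT, _lenient_client
+#
+#     @_lenient_client(services="hypothetical.qualified.service")
+#     def operation():
+#         # While "operation" executes, it is the active lenient client,
+#         # and its registered services may behave leniently.
+#         return _LENIENT.active
+#
+#     operation()  # -> the qualified name of "operation"
+#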
+import iris.tests as tests
+
+from inspect import getmodule
+from unittest.mock import sentinel
+
+from iris.common.lenient import _LENIENT, _lenient_client
+
+
+class Test(tests.IrisTest):
+    def setUp(self):
+        module_name = getmodule(self).__name__
+        self.client = f"{module_name}" + ".Test.{}.<locals>.myclient"
+        self.service = f"{module_name}" + ".Test.{}.<locals>.myservice"
+        self.active = "active"
+        self.args_in = sentinel.arg1, sentinel.arg2
+        self.kwargs_in = dict(kwarg1=sentinel.kwarg1, kwarg2=sentinel.kwarg2)
+
+    def test_args_too_many(self):
+        emsg = "Invalid lenient client arguments, expecting 1"
+        with self.assertRaisesRegex(AssertionError, emsg):
+            _lenient_client(None, None)
+
+    def test_args_not_callable(self):
+        emsg = "Invalid lenient client argument, expecting a callable"
+        with self.assertRaisesRegex(AssertionError, emsg):
+            _lenient_client(None)
+
+    def test_args_and_kwargs(self):
+        def func():
+            pass
+
+        emsg = (
+            "Invalid lenient client, got both arguments and keyword arguments"
+        )
+        with self.assertRaisesRegex(AssertionError, emsg):
+            _lenient_client(func, services=func)
+
+    def test_call_naked(self):
+        @_lenient_client
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = myclient()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_naked")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertNotIn(qualname_client, result)
+
+    def test_call_naked_alternative(self):
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = _lenient_client(myclient)()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_naked_alternative")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertNotIn(qualname_client, result)
+
+    def test_call_naked_client_args_kwargs(self):
+        @_lenient_client
+        def myclient(*args, **kwargs):
+            return args, kwargs
+
+        args_out, kwargs_out = myclient(*self.args_in, **self.kwargs_in)
+        self.assertEqual(args_out, self.args_in)
+        self.assertEqual(kwargs_out, self.kwargs_in)
+
+    def test_call_naked_doc(self):
+        @_lenient_client
+        def myclient():
+            """myclient doc-string"""
+
+        self.assertEqual(myclient.__doc__, "myclient doc-string")
+
+    def test_call_no_kwargs(self):
+        @_lenient_client()
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = myclient()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_no_kwargs")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertNotIn(qualname_client, result)
+
+    def test_call_no_kwargs_alternative(self):
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = (_lenient_client())(myclient)()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_no_kwargs_alternative")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertNotIn(qualname_client, result)
+
+    def test_call_kwargs_none(self):
+        @_lenient_client(services=None)
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = myclient()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_kwargs_none")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertNotIn(qualname_client, result)
+
+    def test_call_kwargs_single(self):
+        service = sentinel.service
+
+        @_lenient_client(services=service)
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = myclient()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_kwargs_single")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertIn(qualname_client, result)
+        self.assertEqual(result[qualname_client], (service,))
+
+    def test_call_kwargs_single_callable(self):
+        def myservice():
+            pass
+
+        @_lenient_client(services=myservice)
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        test_name = "test_call_kwargs_single_callable"
+        result = myclient()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format(test_name)
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertIn(qualname_client, result)
+        qualname_services = (self.service.format(test_name),)
+        self.assertEqual(result[qualname_client], qualname_services)
+
+    def test_call_kwargs_iterable(self):
+        services = (sentinel.service1, sentinel.service2)
+
+        @_lenient_client(services=services)
+        def myclient():
+            return _LENIENT.__dict__.copy()
+
+        result = myclient()
+        self.assertIn(self.active, result)
+        qualname_client = self.client.format("test_call_kwargs_iterable")
+        self.assertEqual(result[self.active], qualname_client)
+        self.assertIn(qualname_client, result)
+        self.assertEqual(set(result[qualname_client]), set(services))
+
+    def test_call_client_args_kwargs(self):
+        @_lenient_client()
+        def myclient(*args, **kwargs):
+            return args, kwargs
+
+        args_out, kwargs_out = myclient(*self.args_in, **self.kwargs_in)
+        self.assertEqual(args_out, self.args_in)
+        self.assertEqual(kwargs_out, self.kwargs_in)
+
+    def test_call_doc(self):
+        @_lenient_client()
+        def myclient():
+            """myclient doc-string"""
+
+        self.assertEqual(myclient.__doc__, "myclient doc-string")
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py
new file mode 100644
index 0000000000..3b019c9de5
--- /dev/null
+++ b/lib/iris/tests/unit/common/lenient/test__lenient_service.py
@@ -0,0 +1,116 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :func:`iris.common.lenient._lenient_service`.
+
+"""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
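+#
+# For orientation, a minimal sketch of the decorator under test (it
+# mirrors the behaviour asserted in the tests below rather than
+# documenting the private API): decorating a function registers it, at
+# definition time, as a lenient service within _LENIENT.
+#
+#     from iris.common.lenient import _LENIENT, _lenient_service
+#
+#     @_lenient_service
+#     def myservice():
+#         pass
+#
+#     # The qualified name of "myservice" is now a truthy _LENIENT option.
+#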
+import iris.tests as tests
+
+from inspect import getmodule
+from unittest.mock import sentinel
+
+from iris.common.lenient import _LENIENT, _lenient_service
+
+
+class Test(tests.IrisTest):
+    def setUp(self):
+        module_name = getmodule(self).__name__
+        self.service = f"{module_name}" + ".Test.{}.<locals>.myservice"
+        self.args_in = sentinel.arg1, sentinel.arg2
+        self.kwargs_in = dict(kwarg1=sentinel.kwarg1, kwarg2=sentinel.kwarg2)
+
+    def test_args_too_many(self):
+        emsg = "Invalid lenient service arguments, expecting 1"
+        with self.assertRaisesRegex(AssertionError, emsg):
+            _lenient_service(None, None)
+
+    def test_args_not_callable(self):
+        emsg = "Invalid lenient service argument, expecting a callable"
+        with self.assertRaisesRegex(AssertionError, emsg):
+            _lenient_service(None)
+
+    def test_call_naked(self):
+        @_lenient_service
+        def myservice():
+            return _LENIENT.__dict__.copy()
+
+        qualname_service = self.service.format("test_call_naked")
+        state = _LENIENT.__dict__
+        self.assertIn(qualname_service, state)
+        self.assertTrue(state[qualname_service])
+        result = myservice()
+        self.assertIn(qualname_service, result)
+        self.assertTrue(result[qualname_service])
+
+    def test_call_naked_alternative(self):
+        def myservice():
+            return _LENIENT.__dict__.copy()
+
+        qualname_service = self.service.format("test_call_naked_alternative")
+        result = _lenient_service(myservice)()
+        self.assertIn(qualname_service, result)
+        self.assertTrue(result[qualname_service])
+
+    def test_call_naked_service_args_kwargs(self):
+        @_lenient_service
+        def myservice(*args, **kwargs):
+            return args, kwargs
+
+        args_out, kwargs_out = myservice(*self.args_in, **self.kwargs_in)
+        self.assertEqual(args_out, self.args_in)
+        self.assertEqual(kwargs_out, self.kwargs_in)
+
+    def test_call_naked_doc(self):
+        @_lenient_service
+        def myservice():
+            """myservice doc-string"""
+
+        self.assertEqual(myservice.__doc__, "myservice doc-string")
+
+    def test_call(self):
+        @_lenient_service()
+        def myservice():
+            return _LENIENT.__dict__.copy()
+
+        qualname_service = self.service.format("test_call")
+        state = _LENIENT.__dict__
+        self.assertIn(qualname_service, state)
+        self.assertTrue(state[qualname_service])
+        result = myservice()
+        self.assertIn(qualname_service, result)
+        self.assertTrue(result[qualname_service])
+
+    def test_call_alternative(self):
+        def myservice():
+            return _LENIENT.__dict__.copy()
+
+        qualname_service = self.service.format("test_call_alternative")
+        result = (_lenient_service())(myservice)()
+        self.assertIn(qualname_service, result)
+        self.assertTrue(result[qualname_service])
+
+    def test_call_service_args_kwargs(self):
+        @_lenient_service()
+        def myservice(*args, **kwargs):
+            return args, kwargs
+
+        args_out, kwargs_out = myservice(*self.args_in, **self.kwargs_in)
+        self.assertEqual(args_out, self.args_in)
+        self.assertEqual(kwargs_out, self.kwargs_in)
+
+    def test_call_doc(self):
+        @_lenient_service()
+        def myservice():
+            """myservice doc-string"""
+
+        self.assertEqual(myservice.__doc__, "myservice doc-string")
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py
new file mode 100644
index 0000000000..e233b2ac78
--- /dev/null
+++ b/lib/iris/tests/unit/common/lenient/test__qualname.py
@@ -0,0 +1,66 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
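+#
+# For orientation (an illustrative sketch, not part of the public API):
+# _qualname maps a callable to its fully qualified dotted name, and
+# passes non-callables through unchanged.
+#
+#     import iris
+#     from iris.common.lenient import _qualname
+#
+#     _qualname(iris.load)       # -> "iris.load"
+#     _qualname("not-callable")  # -> "not-callable" (returned as-is)
+#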
+""" +Unit tests for the :func:`iris.common.lenient._qualname`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from inspect import getmodule +from unittest.mock import sentinel + +from iris.common.lenient import _qualname + + +class Test(tests.IrisTest): + def setUp(self): + module_name = getmodule(self).__name__ + self.locals = f"{module_name}" + ".Test.{}..{}" + + def test_pass_thru_non_callable(self): + func = sentinel.func + result = _qualname(func) + self.assertEqual(result, func) + + def test_callable_function_local(self): + def myfunc(): + pass + + qualname_func = self.locals.format( + "test_callable_function_local", "myfunc" + ) + result = _qualname(myfunc) + self.assertEqual(result, qualname_func) + + def test_callable_function(self): + import iris + + result = _qualname(iris.load) + self.assertEqual(result, "iris.load") + + def test_callable_method_local(self): + class MyClass: + def mymethod(self): + pass + + qualname_method = self.locals.format( + "test_callable_method_local", "MyClass.mymethod" + ) + result = _qualname(MyClass.mymethod) + self.assertEqual(result, qualname_method) + + def test_callable_method(self): + import iris + + result = _qualname(iris.cube.Cube.add_ancillary_variable) + self.assertEqual(result, "iris.cube.Cube.add_ancillary_variable") + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/__init__.py b/lib/iris/tests/unit/common/metadata/__init__.py new file mode 100644 index 0000000000..aba33c8312 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :mod:`iris.common.metadata` package.""" diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py new file mode 100644 index 0000000000..0e2ca52c47 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py @@ -0,0 +1,494 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata, AncillaryVariableMetadata + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.cls = AncillaryVariableMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + ) + fmt = ( + "AncillaryVariableMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test___eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + ) + self.dummy = sentinel.dummy + self.cls = AncillaryVariableMetadata + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__, + ) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + 
self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = AncillaryVariableMetadata + self.one = self.cls(1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1) + self.none = self.cls(1, 1, 1, None, 1) + self.attributes = self.cls(1, 1, 1, 1, 10) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + ) + self.dummy = sentinel.dummy + self.cls = AncillaryVariableMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__, + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def 
test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + ) + self.dummy = sentinel.dummy + self.cls = AncillaryVariableMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__, + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + 
self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = AncillaryVariableMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + 
return_value = sentinel.return_value
+        with mock.patch.object(
+            BaseMetadata, "equal", return_value=return_value
+        ) as mocker:
+            result = self.none.equal(other)
+
+        self.assertEqual(return_value, result)
+        self.assertEqual(1, mocker.call_count)
+        (arg,), kwargs = mocker.call_args
+        self.assertEqual(other, arg)
+        self.assertEqual(dict(lenient=None), kwargs)
+
+    def test_lenient(self):
+        other = sentinel.other
+        lenient = sentinel.lenient
+        return_value = sentinel.return_value
+        with mock.patch.object(
+            BaseMetadata, "equal", return_value=return_value
+        ) as mocker:
+            result = self.none.equal(other, lenient=lenient)
+
+        self.assertEqual(return_value, result)
+        self.assertEqual(1, mocker.call_count)
+        (arg,), kwargs = mocker.call_args
+        self.assertEqual(other, arg)
+        self.assertEqual(dict(lenient=lenient), kwargs)
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py
new file mode 100644
index 0000000000..eb0ee9d659
--- /dev/null
+++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py
@@ -0,0 +1,1636 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :class:`iris.common.metadata.BaseMetadata`.
+
+"""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests
+
+from collections import OrderedDict
+import unittest.mock as mock
+from unittest.mock import sentinel
+
+import numpy.ma as ma
+import numpy as np
+
+from iris.common.lenient import _LENIENT, _qualname
+from iris.common.metadata import BaseMetadata, CubeMetadata
+
+
+class Test(tests.IrisTest):
+    def setUp(self):
+        self.standard_name = mock.sentinel.standard_name
+        self.long_name = mock.sentinel.long_name
+        self.var_name = mock.sentinel.var_name
+        self.units = mock.sentinel.units
+        self.attributes = mock.sentinel.attributes
+        self.cls = BaseMetadata
+
+    def test_repr(self):
+        metadata = self.cls(
+            standard_name=self.standard_name,
+            long_name=self.long_name,
+            var_name=self.var_name,
+            units=self.units,
+            attributes=self.attributes,
+        )
+        fmt = (
+            "BaseMetadata(standard_name={!r}, long_name={!r}, "
+            "var_name={!r}, units={!r}, attributes={!r})"
+        )
+        expected = fmt.format(
+            self.standard_name,
+            self.long_name,
+            self.var_name,
+            self.units,
+            self.attributes,
+        )
+        self.assertEqual(expected, repr(metadata))
+
+    def test__fields(self):
+        expected = (
+            "standard_name",
+            "long_name",
+            "var_name",
+            "units",
+            "attributes",
+        )
+        self.assertEqual(expected, self.cls._fields)
+
+
+class Test___eq__(tests.IrisTest):
+    def setUp(self):
+        self.kwargs = dict(
+            standard_name=sentinel.standard_name,
+            long_name=sentinel.long_name,
+            var_name=sentinel.var_name,
+            units=sentinel.units,
+            attributes=sentinel.attributes,
+        )
+        self.cls = BaseMetadata
+        self.metadata = self.cls(**self.kwargs)
+
+    def test_lenient_service(self):
+        qualname___eq__ = _qualname(self.cls.__eq__)
+        self.assertIn(qualname___eq__, _LENIENT)
+        self.assertTrue(_LENIENT[qualname___eq__])
+        self.assertTrue(_LENIENT[self.cls.__eq__])
+
+    def test_cannot_compare_non_class(self):
+        result = self.metadata.__eq__(None)
+        self.assertIs(NotImplemented, result)
+
+    def test_cannot_compare_different_class(self):
+        other = CubeMetadata(*(None,) * len(CubeMetadata._fields))
+        result =
self.metadata.__eq__(other) + self.assertIs(NotImplemented, result) + + def test_lenient(self): + return_value = sentinel.return_value + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ) as mlenient: + with mock.patch.object( + self.cls, "_compare_lenient", return_value=return_value + ) as mcompare: + result = self.metadata.__eq__(self.metadata) + + self.assertEqual(return_value, result) + self.assertEqual(1, mcompare.call_count) + (arg,), kwargs = mcompare.call_args + self.assertEqual(id(self.metadata), id(arg)) + self.assertEqual(dict(), kwargs) + + self.assertEqual(1, mlenient.call_count) + (arg,), kwargs = mlenient.call_args + self.assertEqual(_qualname(self.cls.__eq__), _qualname(arg)) + self.assertEqual(dict(), kwargs) + + def test_strict_same(self): + self.assertTrue(self.metadata.__eq__(self.metadata)) + other = self.cls(**self.kwargs) + self.assertTrue(self.metadata.__eq__(other)) + self.assertTrue(other.__eq__(self.metadata)) + + def test_strict_different(self): + self.kwargs["var_name"] = None + other = self.cls(**self.kwargs) + self.assertFalse(self.metadata.__eq__(other)) + self.assertFalse(other.__eq__(self.metadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.one = self.cls(1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1) + self.none = self.cls(1, 1, 1, None, 1) + self.attributes = self.cls(1, 1, 1, 1, 10) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test___ne__(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.other = sentinel.other + + def test_notimplemented(self): + return_value = NotImplemented + with mock.patch.object( + self.cls, "__eq__", return_value=return_value + ) as mocker: + result = self.metadata.__ne__(self.other) + + self.assertIs(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(self.other, arg) + self.assertEqual(dict(), kwargs) + + def test_negate_true(self): + return_value = True + with mock.patch.object( + self.cls, "__eq__", return_value=return_value + ) as mocker: + result = self.metadata.__ne__(self.other) + + self.assertFalse(result) + (arg,), kwargs = mocker.call_args + self.assertEqual(self.other, arg) + self.assertEqual(dict(), kwargs) + + def test_negate_false(self): + return_value = False + with mock.patch.object( + self.cls, "__eq__", return_value=return_value + ) as mocker: + result = self.metadata.__ne__(self.other) + + self.assertTrue(result) + (arg,), kwargs = mocker.call_args + self.assertEqual(self.other, arg) + self.assertEqual(dict(), kwargs) + + +class Test__combine(tests.IrisTest): + def setUp(self): + self.kwargs = dict( + standard_name="standard_name", + long_name="long_name", + var_name="var_name", + units="units", + attributes=dict(one=sentinel.one, two=sentinel.two), + ) + self.cls = BaseMetadata + self.metadata = self.cls(**self.kwargs) + + def test_lenient(self): + return_value = sentinel._combine_lenient 
+ other = sentinel.other + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ) as mlenient: + with mock.patch.object( + self.cls, "_combine_lenient", return_value=return_value + ) as mcombine: + result = self.metadata._combine(other) + + self.assertEqual(1, mlenient.call_count) + (arg,), kwargs = mlenient.call_args + self.assertEqual(self.metadata.combine, arg) + self.assertEqual(dict(), kwargs) + + self.assertEqual(return_value, result) + self.assertEqual(1, mcombine.call_count) + (arg,), kwargs = mcombine.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_strict(self): + dummy = sentinel.dummy + values = self.kwargs.copy() + values["standard_name"] = dummy + values["var_name"] = dummy + values["attributes"] = dummy + other = self.cls(**values) + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + result = self.metadata._combine(other) + + expected = [ + None if values[field] == dummy else values[field] + for field in self.cls._fields + ] + self.assertEqual(expected, result) + + +class Test__combine_lenient(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() + self.names = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + ) + + def test_strict_units(self): + left = self.none.copy() + left["units"] = "K" + right = left.copy() + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + expected = list(left.values()) + self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) + self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + + def test_strict_units_different(self): + left = self.none.copy() + right = self.none.copy() + left["units"] = "K" + right["units"] = "km" + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._combine_lenient(rmetadata) + expected = list(self.none.values()) + self.assertEqual(expected, result) + result = rmetadata._combine_lenient(lmetadata) + self.assertEqual(expected, result) + + def test_strict_units_different_none(self): + left = self.none.copy() + right = self.none.copy() + left["units"] = "K" + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._combine_lenient(rmetadata) + expected = list(self.none.values()) + self.assertEqual(expected, result) + + result = rmetadata._combine_lenient(lmetadata) + self.assertEqual(expected, result) + + def test_attributes(self): + left = self.none.copy() + right = self.none.copy() + ldict = dict(item=sentinel.left) + rdict = dict(item=sentinel.right) + left["attributes"] = ldict + right["attributes"] = rdict + rmetadata = self.cls(**right) + return_value = sentinel.return_value + with mock.patch.object( + self.cls, "_combine_lenient_attributes", return_value=return_value, + ) as mocker: + lmetadata = self.cls(**left) + result = lmetadata._combine_lenient(rmetadata) + + expected = self.none.copy() + expected["attributes"] = return_value + expected = list(expected.values()) + self.assertEqual(expected, result) + + self.assertEqual(1, mocker.call_count) + args, kwargs = mocker.call_args + expected = (ldict, rdict) + self.assertEqual(expected, args) + self.assertEqual(dict(), kwargs) + + def test_attributes_non_mapping_different(self): + left = self.none.copy() + right = self.none.copy() + ldict = dict(item=sentinel.left) + rdict = sentinel.right + left["attributes"] = ldict + right["attributes"] = rdict + lmetadata = 
self.cls(**left) + rmetadata = self.cls(**right) + + expected = list(self.none.copy().values()) + self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) + self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + + def test_attributes_non_mapping_different_none(self): + left = self.none.copy() + right = self.none.copy() + ldict = dict(item=sentinel.left) + left["attributes"] = ldict + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._combine_lenient(rmetadata) + expected = self.none.copy() + expected["attributes"] = ldict + expected = list(expected.values()) + self.assertEqual(expected, result) + + result = rmetadata._combine_lenient(lmetadata) + self.assertEqual(expected, result) + + def test_names(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + expected = list(left.values()) + self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) + self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + + def test_names_different(self): + dummy = sentinel.dummy + left = self.none.copy() + right = self.none.copy() + left.update(self.names) + right["standard_name"] = dummy + right["long_name"] = dummy + right["var_name"] = dummy + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + expected = list(self.none.copy().values()) + self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) + self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + + def test_names_different_none(self): + left = self.none.copy() + right = self.none.copy() + left.update(self.names) + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._combine_lenient(rmetadata) + expected = list(left.values()) + self.assertEqual(expected, result) + + result = rmetadata._combine_lenient(lmetadata) + self.assertEqual(expected, result) + + +class Test__combine_lenient_attributes(tests.IrisTest): + def setUp(self): + self.values = OrderedDict( + one="one", + two="two", + three=np.int16(123), + four=np.arange(10), + five=ma.arange(10), + ) + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.dummy = sentinel.dummy + + def test_same(self): + left = self.values.copy() + right = self.values.copy() + + result = self.metadata._combine_lenient_attributes(left, right) + expected = left + self.assertDictEqual(expected, result) + + result = self.metadata._combine_lenient_attributes(right, left) + self.assertDictEqual(expected, result) + + def test_different(self): + left = self.values.copy() + right = self.values.copy() + left["two"] = left["four"] = self.dummy + + result = self.metadata._combine_lenient_attributes(left, right) + expected = self.values.copy() + for key in ["two", "four"]: + del expected[key] + self.assertDictEqual(expected, result) + + result = self.metadata._combine_lenient_attributes(right, left) + self.assertDictEqual(expected, result) + + def test_different_none(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = left["five"] = None + + result = self.metadata._combine_lenient_attributes(left, right) + expected = self.values.copy() + for key in ["one", "three", "five"]: + del expected[key] + self.assertDictEqual(expected, result) + + result = self.metadata._combine_lenient_attributes(right, left) + self.assertDictEqual(expected, result) + + def test_extra(self): + left = self.values.copy() + right = self.values.copy() 
+ left["extra_left"] = "extra_left" + right["extra_right"] = "extra_right" + + result = self.metadata._combine_lenient_attributes(left, right) + expected = self.values.copy() + expected["extra_left"] = left["extra_left"] + expected["extra_right"] = right["extra_right"] + self.assertDictEqual(expected, result) + + result = self.metadata._combine_lenient_attributes(right, left) + self.assertDictEqual(expected, result) + + +class Test__combine_strict_attributes(tests.IrisTest): + def setUp(self): + self.values = OrderedDict( + one="one", + two="two", + three=np.int32(123), + four=np.arange(10), + five=ma.arange(10), + ) + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.dummy = sentinel.dummy + + def test_same(self): + left = self.values.copy() + right = self.values.copy() + + result = self.metadata._combine_strict_attributes(left, right) + expected = left + self.assertDictEqual(expected, result) + + result = self.metadata._combine_strict_attributes(right, left) + self.assertDictEqual(expected, result) + + def test_different(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = self.dummy + + result = self.metadata._combine_strict_attributes(left, right) + expected = self.values.copy() + for key in ["one", "three"]: + del expected[key] + self.assertDictEqual(expected, result) + + result = self.metadata._combine_strict_attributes(right, left) + self.assertDictEqual(expected, result) + + def test_different_none(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = left["five"] = None + + result = self.metadata._combine_strict_attributes(left, right) + expected = self.values.copy() + for key in ["one", "three", "five"]: + del expected[key] + self.assertDictEqual(expected, result) + + result = self.metadata._combine_strict_attributes(right, left) + self.assertDictEqual(expected, result) + + def test_extra(self): + left = self.values.copy() + right = self.values.copy() + left["extra_left"] = "extra_left" + right["extra_right"] = "extra_right" + + result = self.metadata._combine_strict_attributes(left, right) + expected = self.values.copy() + self.assertDictEqual(expected, result) + + result = self.metadata._combine_strict_attributes(right, left) + self.assertDictEqual(expected, result) + + +class Test__compare_lenient(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() + self.names = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + ) + + def test_name_same(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + with mock.patch.object( + self.cls, "_is_attributes", return_value=False + ) as mocker: + self.assertTrue(lmetadata._compare_lenient(rmetadata)) + self.assertTrue(rmetadata._compare_lenient(lmetadata)) + + # mocker not called for "units" nor "var_name" members. 
+ expected = (len(self.cls._fields) - 2) * 2 + self.assertEqual(expected, mocker.call_count) + + def test_name_same_lenient_false__long_name_different(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + right["long_name"] = sentinel.dummy + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + with mock.patch.object( + self.cls, "_is_attributes", return_value=False + ) as mocker: + self.assertFalse(lmetadata._compare_lenient(rmetadata)) + self.assertFalse(rmetadata._compare_lenient(lmetadata)) + + # mocker not called for "units" nor "var_name" members. + expected = (len(self.cls._fields) - 2) * 2 + self.assertEqual(expected, mocker.call_count) + + def test_name_same_lenient_true__var_name_different(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + right["var_name"] = sentinel.dummy + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + with mock.patch.object( + self.cls, "_is_attributes", return_value=False + ) as mocker: + self.assertTrue(lmetadata._compare_lenient(rmetadata)) + self.assertTrue(rmetadata._compare_lenient(lmetadata)) + + # mocker not called for "units" nor "var_name" members. + expected = (len(self.cls._fields) - 2) * 2 + self.assertEqual(expected, mocker.call_count) + + def test_name_different(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + right["standard_name"] = None + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + with mock.patch.object(self.cls, "_is_attributes") as mocker: + self.assertFalse(lmetadata._compare_lenient(rmetadata)) + self.assertFalse(rmetadata._compare_lenient(lmetadata)) + + self.assertEqual(0, mocker.call_count) + + def test_strict_units(self): + left = self.none.copy() + left.update(self.names) + left["units"] = "K" + right = left.copy() + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + with mock.patch.object( + self.cls, "_is_attributes", return_value=False + ) as mocker: + self.assertTrue(lmetadata._compare_lenient(rmetadata)) + self.assertTrue(rmetadata._compare_lenient(lmetadata)) + + # mocker not called for "units" nor "var_name" members. + expected = (len(self.cls._fields) - 2) * 2 + self.assertEqual(expected, mocker.call_count) + + def test_strict_units_different(self): + left = self.none.copy() + left.update(self.names) + left["units"] = "K" + right = left.copy() + right["units"] = "m" + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + with mock.patch.object( + self.cls, "_is_attributes", return_value=False + ) as mocker: + self.assertFalse(lmetadata._compare_lenient(rmetadata)) + self.assertFalse(rmetadata._compare_lenient(lmetadata)) + + # mocker not called for "units" nor "var_name" members. 
+ expected = (len(self.cls._fields) - 2) * 2 + self.assertEqual(expected, mocker.call_count) + + def test_attributes(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + ldict = dict(item=sentinel.left) + rdict = dict(item=sentinel.right) + left["attributes"] = ldict + right["attributes"] = rdict + rmetadata = self.cls(**right) + with mock.patch.object( + self.cls, "_compare_lenient_attributes", return_value=True, + ) as mocker: + lmetadata = self.cls(**left) + self.assertTrue(lmetadata._compare_lenient(rmetadata)) + self.assertTrue(rmetadata._compare_lenient(lmetadata)) + + self.assertEqual(2, mocker.call_count) + expected = [((ldict, rdict),), ((rdict, ldict),)] + self.assertEqual(expected, mocker.call_args_list) + + def test_attributes_non_mapping_different(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + ldict = dict(item=sentinel.left) + rdict = sentinel.right + left["attributes"] = ldict + right["attributes"] = rdict + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + self.assertFalse(lmetadata._compare_lenient(rmetadata)) + self.assertFalse(rmetadata._compare_lenient(lmetadata)) + + def test_attributes_non_mapping_different_none(self): + left = self.none.copy() + left.update(self.names) + right = left.copy() + ldict = dict(item=sentinel.left) + left["attributes"] = ldict + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + self.assertTrue(lmetadata._compare_lenient(rmetadata)) + self.assertTrue(rmetadata._compare_lenient(lmetadata)) + + def test_names(self): + left = self.none.copy() + left.update(self.names) + left["long_name"] = None + right = self.none.copy() + right["long_name"] = left["standard_name"] + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + self.assertTrue(lmetadata._compare_lenient(rmetadata)) + self.assertTrue(rmetadata._combine_lenient(lmetadata)) + + +class Test__compare_lenient_attributes(tests.IrisTest): + def setUp(self): + self.values = OrderedDict( + one=sentinel.one, + two=sentinel.two, + three=np.int16(123), + four=np.arange(10), + five=ma.arange(5), + ) + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.dummy = sentinel.dummy + + def test_same(self): + left = self.values.copy() + right = self.values.copy() + + self.assertTrue(self.metadata._compare_lenient_attributes(left, right)) + self.assertTrue(self.metadata._compare_lenient_attributes(right, left)) + + def test_different(self): + left = self.values.copy() + right = self.values.copy() + left["two"] = left["four"] = self.dummy + + self.assertFalse( + self.metadata._compare_lenient_attributes(left, right) + ) + self.assertFalse( + self.metadata._compare_lenient_attributes(right, left) + ) + + def test_different_none(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = left["five"] = None + + self.assertFalse( + self.metadata._compare_lenient_attributes(left, right) + ) + self.assertFalse( + self.metadata._compare_lenient_attributes(right, left) + ) + + def test_extra(self): + left = self.values.copy() + right = self.values.copy() + left["extra_left"] = sentinel.extra_left + right["extra_right"] = sentinel.extra_right + + self.assertTrue(self.metadata._compare_lenient_attributes(left, right)) + self.assertTrue(self.metadata._compare_lenient_attributes(right, left)) + + +class Test__compare_strict_attributes(tests.IrisTest): + def setUp(self): + self.values = OrderedDict( + one=sentinel.one, + two=sentinel.two, + 
three=np.int16(123), + four=np.arange(10), + five=ma.arange(5), + ) + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.dummy = sentinel.dummy + + def test_same(self): + left = self.values.copy() + right = self.values.copy() + + self.assertTrue(self.metadata._compare_strict_attributes(left, right)) + self.assertTrue(self.metadata._compare_strict_attributes(right, left)) + + def test_different(self): + left = self.values.copy() + right = self.values.copy() + left["two"] = left["four"] = self.dummy + + self.assertFalse(self.metadata._compare_strict_attributes(left, right)) + self.assertFalse(self.metadata._compare_strict_attributes(right, left)) + + def test_different_none(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = left["five"] = None + + self.assertFalse(self.metadata._compare_strict_attributes(left, right)) + self.assertFalse(self.metadata._compare_strict_attributes(right, left)) + + def test_extra(self): + left = self.values.copy() + right = self.values.copy() + left["extra_left"] = sentinel.extra_left + right["extra_right"] = sentinel.extra_right + + self.assertFalse(self.metadata._compare_strict_attributes(left, right)) + self.assertFalse(self.metadata._compare_strict_attributes(right, left)) + + +class Test__difference(tests.IrisTest): + def setUp(self): + self.kwargs = dict( + standard_name="standard_name", + long_name="long_name", + var_name="var_name", + units="units", + attributes=dict(one=sentinel.one, two=sentinel.two), + ) + self.cls = BaseMetadata + self.metadata = self.cls(**self.kwargs) + + def test_lenient(self): + return_value = sentinel._difference_lenient + other = sentinel.other + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ) as mlenient: + with mock.patch.object( + self.cls, "_difference_lenient", return_value=return_value + ) as mdifference: + result = self.metadata._difference(other) + + self.assertEqual(1, mlenient.call_count) + (arg,), kwargs = mlenient.call_args + self.assertEqual(self.metadata.difference, arg) + self.assertEqual(dict(), kwargs) + + self.assertEqual(return_value, result) + self.assertEqual(1, mdifference.call_count) + (arg,), kwargs = mdifference.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_strict(self): + dummy = sentinel.dummy + values = self.kwargs.copy() + values["long_name"] = dummy + values["units"] = dummy + other = self.cls(**values) + method = "_difference_strict_attributes" + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + with mock.patch.object( + self.cls, method, return_value=None + ) as mdifference: + result = self.metadata._difference(other) + + expected = [ + (self.kwargs[field], dummy) if values[field] == dummy else None + for field in self.cls._fields + ] + self.assertEqual(expected, result) + self.assertEqual(1, mdifference.call_count) + args, kwargs = mdifference.call_args + expected = (self.kwargs["attributes"], values["attributes"]) + self.assertEqual(expected, args) + self.assertEqual(dict(), kwargs) + + with mock.patch.object( + self.cls, method, return_value=None + ) as mdifference: + result = other._difference(self.metadata) + + expected = [ + (dummy, self.kwargs[field]) if values[field] == dummy else None + for field in self.cls._fields + ] + self.assertEqual(expected, result) + self.assertEqual(1, mdifference.call_count) + args, kwargs = mdifference.call_args + expected = (self.kwargs["attributes"], values["attributes"]) + 
self.assertEqual(expected, args) + self.assertEqual(dict(), kwargs) + + +class Test__difference_lenient(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() + self.names = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + ) + + def test_strict_units(self): + left = self.none.copy() + left["units"] = "km" + right = left.copy() + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + expected = list(self.none.values()) + self.assertEqual(expected, lmetadata._difference_lenient(rmetadata)) + self.assertEqual(expected, rmetadata._difference_lenient(lmetadata)) + + def test_strict_units_different(self): + left = self.none.copy() + right = self.none.copy() + lunits, runits = "m", "km" + left["units"] = lunits + right["units"] = runits + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._difference_lenient(rmetadata) + expected = self.none.copy() + expected["units"] = (lunits, runits) + expected = list(expected.values()) + self.assertEqual(expected, result) + + result = rmetadata._difference_lenient(lmetadata) + expected = self.none.copy() + expected["units"] = (runits, lunits) + expected = list(expected.values()) + self.assertEqual(expected, result) + + def test_strict_units_different_none(self): + left = self.none.copy() + right = self.none.copy() + lunits, runits = "m", None + left["units"] = lunits + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._difference_lenient(rmetadata) + expected = self.none.copy() + expected["units"] = (lunits, runits) + expected = list(expected.values()) + + self.assertEqual(expected, result) + result = rmetadata._difference_lenient(lmetadata) + expected = self.none.copy() + expected["units"] = (runits, lunits) + expected = list(expected.values()) + self.assertEqual(expected, result) + + def test_attributes(self): + left = self.none.copy() + right = self.none.copy() + ldict = dict(item=sentinel.left) + rdict = dict(item=sentinel.right) + left["attributes"] = ldict + right["attributes"] = rdict + rmetadata = self.cls(**right) + return_value = sentinel.return_value + with mock.patch.object( + self.cls, + "_difference_lenient_attributes", + return_value=return_value, + ) as mocker: + lmetadata = self.cls(**left) + result = lmetadata._difference_lenient(rmetadata) + + expected = self.none.copy() + expected["attributes"] = return_value + expected = list(expected.values()) + self.assertEqual(expected, result) + + self.assertEqual(1, mocker.call_count) + args, kwargs = mocker.call_args + expected = (ldict, rdict) + self.assertEqual(expected, args) + self.assertEqual(dict(), kwargs) + + def test_attributes_non_mapping_different(self): + left = self.none.copy() + right = self.none.copy() + ldict = dict(item=sentinel.left) + rdict = sentinel.right + left["attributes"] = ldict + right["attributes"] = rdict + lmetadata = self.cls(**left) + rmetadata = self.cls(**right) + + result = lmetadata._difference_lenient(rmetadata) + expected = self.none.copy() + expected["attributes"] = (ldict, rdict) + expected = list(expected.values()) + self.assertEqual(expected, result) + + result = rmetadata._difference_lenient(lmetadata) + expected = self.none.copy() + expected["attributes"] = (rdict, ldict) + expected = list(expected.values()) + self.assertEqual(expected, result) + + def test_attributes_non_mapping_different_none(self): + left = self.none.copy() + right = 
self.none.copy()
+        ldict = dict(item=sentinel.left)
+        left["attributes"] = ldict
+        lmetadata = self.cls(**left)
+        rmetadata = self.cls(**right)
+
+        result = lmetadata._difference_lenient(rmetadata)
+        expected = list(self.none.copy().values())
+        self.assertEqual(expected, result)
+
+        result = rmetadata._difference_lenient(lmetadata)
+        self.assertEqual(expected, result)
+
+    def test_names(self):
+        left = self.none.copy()
+        left.update(self.names)
+        right = left.copy()
+        lmetadata = self.cls(**left)
+        rmetadata = self.cls(**right)
+
+        expected = list(self.none.values())
+        self.assertEqual(expected, lmetadata._difference_lenient(rmetadata))
+        self.assertEqual(expected, rmetadata._difference_lenient(lmetadata))
+
+    def test_names_different(self):
+        dummy = sentinel.dummy
+        left = self.none.copy()
+        right = self.none.copy()
+        left.update(self.names)
+        right["standard_name"] = dummy
+        right["long_name"] = dummy
+        right["var_name"] = dummy
+        lmetadata = self.cls(**left)
+        rmetadata = self.cls(**right)
+
+        result = lmetadata._difference_lenient(rmetadata)
+        expected = self.none.copy()
+        expected["standard_name"] = (
+            left["standard_name"],
+            right["standard_name"],
+        )
+        expected["long_name"] = (left["long_name"], right["long_name"])
+        expected["var_name"] = (left["var_name"], right["var_name"])
+        expected = list(expected.values())
+        self.assertEqual(expected, result)
+
+        result = rmetadata._difference_lenient(lmetadata)
+        expected = self.none.copy()
+        expected["standard_name"] = (
+            right["standard_name"],
+            left["standard_name"],
+        )
+        expected["long_name"] = (right["long_name"], left["long_name"])
+        expected["var_name"] = (right["var_name"], left["var_name"])
+        expected = list(expected.values())
+        self.assertEqual(expected, result)
+
+    def test_names_different_none(self):
+        left = self.none.copy()
+        right = self.none.copy()
+        left.update(self.names)
+        lmetadata = self.cls(**left)
+        rmetadata = self.cls(**right)
+
+        result = lmetadata._difference_lenient(rmetadata)
+        expected = list(self.none.values())
+        self.assertEqual(expected, result)
+
+        result = rmetadata._difference_lenient(lmetadata)
+        self.assertEqual(expected, result)
+
+
+class Test__difference_lenient_attributes(tests.IrisTest):
+    def setUp(self):
+        self.values = OrderedDict(
+            one=sentinel.one,
+            two=sentinel.two,
+            # Use explicit NumPy scalar/dtype spellings here; the bare
+            # ``np.float`` alias is deprecated and removed in newer NumPy.
+            three=np.float64(3.14),
+            four=np.arange(10, dtype=np.float64),
+            five=ma.arange(10, dtype=np.int16),
+        )
+        self.cls = BaseMetadata
+        self.metadata = self.cls(*(None,) * len(self.cls._fields))
+        self.dummy = sentinel.dummy
+
+    def test_same(self):
+        left = self.values.copy()
+        right = self.values.copy()
+
+        result = self.metadata._difference_lenient_attributes(left, right)
+        self.assertIsNone(result)
+
+        result = self.metadata._difference_lenient_attributes(right, left)
+        self.assertIsNone(result)
+
+    def test_different(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["two"] = left["four"] = self.dummy
+
+        result = self.metadata._difference_lenient_attributes(left, right)
+        for key in ["one", "three", "five"]:
+            del left[key]
+            del right[key]
+        expected_left, expected_right = (left, right)
+        result_left, result_right = result
+        self.assertDictEqual(expected_left, result_left)
+        self.assertDictEqual(expected_right, result_right)
+
+        result = self.metadata._difference_lenient_attributes(right, left)
+        result_left, result_right = result
+        self.assertDictEqual(expected_right, result_left)
+        self.assertDictEqual(expected_left, result_right)
+
+    def test_different_none(self):
+        left = self.values.copy()
+
right = self.values.copy() + left["one"] = left["three"] = left["five"] = None + + result = self.metadata._difference_lenient_attributes(left, right) + for key in ["two", "four"]: + del left[key] + del right[key] + expected_left, expected_right = (left, right) + result_left, result_right = result + self.assertDictEqual(expected_left, result_left) + self.assertDictEqual(expected_right, result_right) + + result = self.metadata._difference_lenient_attributes(right, left) + result_left, result_right = result + self.assertDictEqual(expected_right, result_left) + self.assertDictEqual(expected_left, result_right) + + def test_extra(self): + left = self.values.copy() + right = self.values.copy() + left["extra_left"] = sentinel.extra_left + right["extra_right"] = sentinel.extra_right + result = self.metadata._difference_lenient_attributes(left, right) + self.assertIsNone(result) + + result = self.metadata._difference_lenient_attributes(right, left) + self.assertIsNone(result) + + +class Test__difference_strict_attributes(tests.IrisTest): + def setUp(self): + self.values = OrderedDict( + one=sentinel.one, + two=sentinel.two, + three=np.int32(123), + four=np.arange(10), + five=ma.arange(10), + ) + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.dummy = sentinel.dummy + + def test_same(self): + left = self.values.copy() + right = self.values.copy() + + result = self.metadata._difference_strict_attributes(left, right) + self.assertIsNone(result) + result = self.metadata._difference_strict_attributes(right, left) + self.assertIsNone(result) + + def test_different(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = left["five"] = self.dummy + + result = self.metadata._difference_strict_attributes(left, right) + expected_left = left.copy() + expected_right = right.copy() + for key in ["two", "four"]: + del expected_left[key] + del expected_right[key] + result_left, result_right = result + self.assertDictEqual(expected_left, result_left) + self.assertDictEqual(expected_right, result_right) + + result = self.metadata._difference_strict_attributes(right, left) + result_left, result_right = result + self.assertDictEqual(expected_right, result_left) + self.assertDictEqual(expected_left, result_right) + + def test_different_none(self): + left = self.values.copy() + right = self.values.copy() + left["one"] = left["three"] = left["five"] = None + + result = self.metadata._difference_strict_attributes(left, right) + expected_left = left.copy() + expected_right = right.copy() + for key in ["two", "four"]: + del expected_left[key] + del expected_right[key] + result_left, result_right = result + self.assertDictEqual(expected_left, result_left) + self.assertDictEqual(expected_right, result_right) + + result = self.metadata._difference_strict_attributes(right, left) + result_left, result_right = result + self.assertDictEqual(expected_right, result_left) + self.assertDictEqual(expected_left, result_right) + + def test_extra(self): + left = self.values.copy() + right = self.values.copy() + left["extra_left"] = sentinel.extra_left + right["extra_right"] = sentinel.extra_right + + result = self.metadata._difference_strict_attributes(left, right) + expected_left = dict(extra_left=left["extra_left"]) + expected_right = dict(extra_right=right["extra_right"]) + result_left, result_right = result + self.assertDictEqual(expected_left, result_left) + self.assertDictEqual(expected_right, result_right) + + result = 
self.metadata._difference_strict_attributes(right, left) + result_left, result_right = result + self.assertDictEqual(expected_right, result_left) + self.assertDictEqual(expected_left, result_right) + + +class Test__is_attributes(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.metadata = self.cls(*(None,) * len(self.cls._fields)) + self.field = "attributes" + + def test_field(self): + self.assertTrue(self.metadata._is_attributes(self.field, {}, {})) + + def test_field_not_attributes(self): + self.assertFalse(self.metadata._is_attributes(None, {}, {})) + + def test_left_not_mapping(self): + self.assertFalse(self.metadata._is_attributes(self.field, None, {})) + + def test_right_not_mapping(self): + self.assertFalse(self.metadata._is_attributes(self.field, {}, None)) + + +class Test_combine(tests.IrisTest): + def setUp(self): + kwargs = dict( + standard_name="standard_name", + long_name="long_name", + var_name="var_name", + units="units", + attributes="attributes", + ) + self.cls = BaseMetadata + self.metadata = self.cls(**kwargs) + self.mock_kwargs = OrderedDict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_cannot_combine_non_class(self): + emsg = "Cannot combine" + with self.assertRaisesRegex(TypeError, emsg): + self.metadata.combine(None) + + def test_cannot_combine_different_class(self): + other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) + emsg = "Cannot combine" + with self.assertRaisesRegex(TypeError, emsg): + self.metadata.combine(other) + + def test_lenient_default(self): + return_value = self.mock_kwargs.values() + with mock.patch.object( + self.cls, "_combine", return_value=return_value + ) as mocker: + result = self.metadata.combine(self.metadata) + + self.assertEqual(self.mock_kwargs, result._asdict()) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(id(self.metadata), id(arg)) + self.assertEqual(dict(), kwargs) + + def test_lenient_true(self): + return_value = self.mock_kwargs.values() + with mock.patch.object( + self.cls, "_combine", return_value=return_value + ) as mcombine: + with mock.patch.object(_LENIENT, "context") as mcontext: + result = self.metadata.combine(self.metadata, lenient=True) + + self.assertEqual(1, mcontext.call_count) + (arg,), kwargs = mcontext.call_args + self.assertEqual(_qualname(self.cls.combine), arg) + self.assertEqual(dict(), kwargs) + + self.assertEqual(result._asdict(), self.mock_kwargs) + self.assertEqual(1, mcombine.call_count) + (arg,), kwargs = mcombine.call_args + self.assertEqual(id(self.metadata), id(arg)) + self.assertEqual(dict(), kwargs) + + def test_lenient_false(self): + return_value = self.mock_kwargs.values() + with mock.patch.object( + self.cls, "_combine", return_value=return_value + ) as mcombine: + with mock.patch.object(_LENIENT, "context") as mcontext: + result = self.metadata.combine(self.metadata, lenient=False) + + self.assertEqual(1, mcontext.call_count) + args, kwargs = mcontext.call_args + self.assertEqual((), args) + self.assertEqual({_qualname(self.cls.combine): False}, kwargs) + + self.assertEqual(self.mock_kwargs, result._asdict()) + self.assertEqual(1, mcombine.call_count) + (arg,), kwargs = 
mcombine.call_args
+        self.assertEqual(id(self.metadata), id(arg))
+        self.assertEqual(dict(), kwargs)
+
+
+class Test_difference(tests.IrisTest):
+    def setUp(self):
+        kwargs = dict(
+            standard_name="standard_name",
+            long_name="long_name",
+            var_name="var_name",
+            units="units",
+            attributes="attributes",
+        )
+        self.cls = BaseMetadata
+        self.metadata = self.cls(**kwargs)
+        self.mock_kwargs = OrderedDict(
+            standard_name=sentinel.standard_name,
+            long_name=sentinel.long_name,
+            var_name=sentinel.var_name,
+            units=sentinel.units,
+            attributes=sentinel.attributes,
+        )
+
+    def test_lenient_service(self):
+        qualname_difference = _qualname(self.cls.difference)
+        self.assertIn(qualname_difference, _LENIENT)
+        self.assertTrue(_LENIENT[qualname_difference])
+        self.assertTrue(_LENIENT[self.cls.difference])
+
+    def test_cannot_differ_non_class(self):
+        emsg = "Cannot differ"
+        with self.assertRaisesRegex(TypeError, emsg):
+            self.metadata.difference(None)
+
+    def test_cannot_differ_different_class(self):
+        other = CubeMetadata(*(None,) * len(CubeMetadata._fields))
+        emsg = "Cannot differ"
+        with self.assertRaisesRegex(TypeError, emsg):
+            self.metadata.difference(other)
+
+    def test_lenient_default(self):
+        return_value = self.mock_kwargs.values()
+        with mock.patch.object(
+            self.cls, "_difference", return_value=return_value
+        ) as mocker:
+            result = self.metadata.difference(self.metadata)
+
+        self.assertEqual(self.mock_kwargs, result._asdict())
+        self.assertEqual(1, mocker.call_count)
+        (arg,), kwargs = mocker.call_args
+        self.assertEqual(id(self.metadata), id(arg))
+        self.assertEqual(dict(), kwargs)
+
+    def test_lenient_true(self):
+        return_value = self.mock_kwargs.values()
+        with mock.patch.object(
+            self.cls, "_difference", return_value=return_value
+        ) as mdifference:
+            with mock.patch.object(_LENIENT, "context") as mcontext:
+                result = self.metadata.difference(self.metadata, lenient=True)
+
+        self.assertEqual(1, mcontext.call_count)
+        (arg,), kwargs = mcontext.call_args
+        self.assertEqual(_qualname(self.cls.difference), arg)
+        self.assertEqual(dict(), kwargs)
+
+        self.assertEqual(self.mock_kwargs, result._asdict())
+        self.assertEqual(1, mdifference.call_count)
+        (arg,), kwargs = mdifference.call_args
+        self.assertEqual(id(self.metadata), id(arg))
+        self.assertEqual(dict(), kwargs)
+
+    def test_lenient_false(self):
+        return_value = self.mock_kwargs.values()
+        with mock.patch.object(
+            self.cls, "_difference", return_value=return_value
+        ) as mdifference:
+            with mock.patch.object(_LENIENT, "context") as mcontext:
+                result = self.metadata.difference(self.metadata, lenient=False)
+
+        self.assertEqual(1, mcontext.call_count)
+        args, kwargs = mcontext.call_args
+        self.assertEqual((), args)
+        self.assertEqual({_qualname(self.cls.difference): False}, kwargs)
+
+        self.assertEqual(self.mock_kwargs, result._asdict())
+        self.assertEqual(1, mdifference.call_count)
+        (arg,), kwargs = mdifference.call_args
+        self.assertEqual(id(self.metadata), id(arg))
+        self.assertEqual(dict(), kwargs)
+
+
+class Test_equal(tests.IrisTest):
+    def setUp(self):
+        kwargs = dict(
+            standard_name=sentinel.standard_name,
+            long_name=sentinel.long_name,
+            var_name=sentinel.var_name,
+            units=sentinel.units,
+            attributes=sentinel.attributes,
+        )
+        self.cls = BaseMetadata
+        self.metadata = self.cls(**kwargs)
+
+    def test_lenient_service(self):
+        qualname_equal = _qualname(self.cls.equal)
+        self.assertIn(qualname_equal, _LENIENT)
+        self.assertTrue(_LENIENT[qualname_equal])
+        self.assertTrue(_LENIENT[self.cls.equal])
+
+    def
test_cannot_compare_non_class(self): + emsg = "Cannot compare" + with self.assertRaisesRegex(TypeError, emsg): + self.metadata.equal(None) + + def test_cannot_compare_different_class(self): + other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) + emsg = "Cannot compare" + with self.assertRaisesRegex(TypeError, emsg): + self.metadata.equal(other) + + def test_lenient_default(self): + return_value = sentinel.return_value + with mock.patch.object( + self.cls, "__eq__", return_value=return_value + ) as mocker: + result = self.metadata.equal(self.metadata) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(id(self.metadata), id(arg)) + self.assertEqual(dict(), kwargs) + + def test_lenient_true(self): + return_value = sentinel.return_value + with mock.patch.object( + self.cls, "__eq__", return_value=return_value + ) as m__eq__: + with mock.patch.object(_LENIENT, "context") as mcontext: + result = self.metadata.equal(self.metadata, lenient=True) + + self.assertEqual(return_value, result) + self.assertEqual(1, mcontext.call_count) + (arg,), kwargs = mcontext.call_args + self.assertEqual(_qualname(self.cls.equal), arg) + self.assertEqual(dict(), kwargs) + + self.assertEqual(1, m__eq__.call_count) + (arg,), kwargs = m__eq__.call_args + self.assertEqual(id(self.metadata), id(arg)) + self.assertEqual(dict(), kwargs) + + def test_lenient_false(self): + return_value = sentinel.return_value + with mock.patch.object( + self.cls, "__eq__", return_value=return_value + ) as m__eq__: + with mock.patch.object(_LENIENT, "context") as mcontext: + result = self.metadata.equal(self.metadata, lenient=False) + + self.assertEqual(1, mcontext.call_count) + args, kwargs = mcontext.call_args + self.assertEqual((), args) + self.assertEqual({_qualname(self.cls.equal): False}, kwargs) + + self.assertEqual(return_value, result) + self.assertEqual(1, m__eq__.call_count) + (arg,), kwargs = m__eq__.call_args + self.assertEqual(id(self.metadata), id(arg)) + self.assertEqual(dict(), kwargs) + + +class Test_name(tests.IrisTest): + def setUp(self): + self.cls = BaseMetadata + self.default = self.cls.DEFAULT_NAME + + @staticmethod + def _make(standard_name=None, long_name=None, var_name=None): + return BaseMetadata( + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=None, + attributes=None, + ) + + def test_standard_name(self): + token = "standard_name" + metadata = self._make(standard_name=token) + + result = metadata.name() + self.assertEqual(token, result) + result = metadata.name(token=True) + self.assertEqual(token, result) + + def test_standard_name__invalid_token(self): + token = "nope nope" + metadata = self._make(standard_name=token) + + result = metadata.name() + self.assertEqual(token, result) + result = metadata.name(token=True) + self.assertEqual(self.default, result) + + def test_long_name(self): + token = "long_name" + metadata = self._make(long_name=token) + + result = metadata.name() + self.assertEqual(token, result) + result = metadata.name(token=True) + self.assertEqual(token, result) + + def test_long_name__invalid_token(self): + token = "nope nope" + metadata = self._make(long_name=token) + + result = metadata.name() + self.assertEqual(token, result) + result = metadata.name(token=True) + self.assertEqual(self.default, result) + + def test_var_name(self): + token = "var_name" + metadata = self._make(var_name=token) + + result = metadata.name() + self.assertEqual(token, result) + result = 
metadata.name(token=True)
+        self.assertEqual(token, result)
+
+    def test_var_name__invalid_token(self):
+        token = "nope nope"
+        metadata = self._make(var_name=token)
+
+        result = metadata.name()
+        self.assertEqual(token, result)
+        result = metadata.name(token=True)
+        self.assertEqual(self.default, result)
+
+    def test_default(self):
+        metadata = self._make()
+
+        result = metadata.name()
+        self.assertEqual(self.default, result)
+        result = metadata.name(token=True)
+        self.assertEqual(self.default, result)
+
+    def test_default__invalid_token(self):
+        token = "nope nope"
+        metadata = self._make()
+
+        result = metadata.name(default=token)
+        self.assertEqual(token, result)
+
+        emsg = "Cannot retrieve a valid name token"
+        with self.assertRaisesRegex(ValueError, emsg):
+            metadata.name(default=token, token=True)
+
+
+class Test_token(tests.IrisTest):
+    def setUp(self):
+        self.cls = BaseMetadata
+
+    def test_passthru_None(self):
+        result = self.cls.token(None)
+        self.assertIsNone(result)
+
+    def test_fail_leading_underscore(self):
+        result = self.cls.token("_nope")
+        self.assertIsNone(result)
+
+    def test_fail_leading_dot(self):
+        result = self.cls.token(".nope")
+        self.assertIsNone(result)
+
+    def test_fail_leading_plus(self):
+        result = self.cls.token("+nope")
+        self.assertIsNone(result)
+
+    def test_fail_leading_at(self):
+        result = self.cls.token("@nope")
+        self.assertIsNone(result)
+
+    def test_fail_space(self):
+        result = self.cls.token("nope nope")
+        self.assertIsNone(result)
+
+    def test_fail_colon(self):
+        result = self.cls.token("nope:")
+        self.assertIsNone(result)
+
+    def test_pass_simple(self):
+        token = "simple"
+        result = self.cls.token(token)
+        self.assertEqual(token, result)
+
+    def test_pass_leading_digit(self):
+        token = "123simple"
+        result = self.cls.token(token)
+        self.assertEqual(token, result)
+
+    def test_pass_mixture(self):
+        token = "S.imple@one+two_3"
+        result = self.cls.token(token)
+        self.assertEqual(token, result)
+
+
+if __name__ == "__main__":
+    tests.main()
diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py
new file mode 100644
index 0000000000..6044fbc628
--- /dev/null
+++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py
@@ -0,0 +1,663 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`.
+
+"""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
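+#
+# A rough sketch of the lenient vs. strict semantics exercised by these
+# tests; the values below are hypothetical and shown for orientation only:
+#
+#     left = CellMeasureMetadata(standard_name="cell_area", long_name=None,
+#                                var_name=None, units="m2", attributes={},
+#                                measure="area")
+#     right = left._replace(var_name="areacella")
+#     left.equal(right, lenient=True)   # True: a missing var_name is tolerated
+#     left.equal(right, lenient=False)  # False: every member must match
+#
+# Note that the "measure" member is compared strictly even in lenient mode.
+#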
+import iris.tests as tests + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata, CellMeasureMetadata + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.measure = mock.sentinel.measure + self.cls = CellMeasureMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + measure=self.measure, + ) + fmt = ( + "CellMeasureMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r}, measure={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.measure, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + "measure", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test___eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + measure=sentinel.measure, + ) + self.dummy = sentinel.dummy + self.cls = CellMeasureMetadata + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__, + ) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_measure_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = 
self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_measure(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_measure(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_measure_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = CellMeasureMetadata + self.one = self.cls(1, 1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1, 1) + self.none = self.cls(1, 1, 1, None, 1, 1) + self.attributes = self.cls(1, 1, 1, 1, 10, 1) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + measure=sentinel.measure, + ) + self.dummy = sentinel.dummy + self.cls = CellMeasureMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, 
self.cls.combine.__doc__, + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_measure_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_measure(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["measure"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict())
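
(Reviewer sketch; not patch content.) The combine tests in this class pin down one rule: fields that agree survive a combine, conflicting values drop to None, and on the lenient path a one-sided None is forgiven rather than treated as a conflict. A minimal standalone sketch of that rule, assuming only the behaviour these tests assert; the concrete field values are illustrative:

    from unittest import mock

    from iris.common.metadata import CellMeasureMetadata

    # Fields: standard_name, long_name, var_name, units, attributes, measure.
    left = CellMeasureMetadata("cell_area", None, None, "m2", {}, "area")
    right = left._replace(units="km2")  # conflict in units only

    with mock.patch("iris.common.metadata._LENIENT", return_value=False):
        combined = left.combine(right)

    assert combined.units is None      # conflicting value -> None
    assert combined.measure == "area"  # agreeing value survives
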
+ + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_measure(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["measure"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_measure_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["measure"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + measure=sentinel.measure, + ) + self.dummy = sentinel.dummy + self.cls = CellMeasureMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__, + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = 
self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_measure_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["measure"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["measure"] = (sentinel.measure, None) + rexpected = deepcopy(self.none)._asdict() + rexpected["measure"] = (None, sentinel.measure) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_measure(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["measure"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["measure"] = (left["measure"], right["measure"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["measure"] = lexpected["measure"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_measure(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["measure"] = self.dummy + rmetadata = self.cls(**right) + lexpected = 
deepcopy(self.none)._asdict() + lexpected["measure"] = (left["measure"], right["measure"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["measure"] = lexpected["measure"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_measure_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["measure"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["measure"] = (left["measure"], right["measure"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["measure"] = lexpected["measure"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = CellMeasureMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py new file mode 100644 index 0000000000..c37d33c62f --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py @@ -0,0 +1,724 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.metadata.CoordMetadata`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata, CoordMetadata + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.coord_system = mock.sentinel.coord_system + self.climatological = mock.sentinel.climatological + self.cls = CoordMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + coord_system=self.coord_system, + climatological=self.climatological, + ) + fmt = ( + "CoordMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r}, coord_system={!r}, " + "climatological={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.coord_system, + self.climatological, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + "coord_system", + "climatological", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test___eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + coord_system=sentinel.coord_system, + climatological=sentinel.climatological, + ) + self.dummy = sentinel.dummy + self.cls = CoordMetadata + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__, + ) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + 
self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = CoordMetadata + self.one = self.cls(1, 1, 1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1, 1, 1) + self.none = self.cls(1, 1, 1, None, 1, 1, 1) + self.attributes_cs = self.cls(1, 1, 1, 1, 10, 10, 1) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < 
self.one + self.assertTrue(result) + + def test__ignore_attributes_coord_system(self): + result = self.one < self.attributes_cs + self.assertFalse(result) + result = self.attributes_cs < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + coord_system=sentinel.coord_system, + climatological=sentinel.climatological, + ) + self.dummy = sentinel.dummy + self.cls = CoordMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__, + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + +
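
(Reviewer sketch; not patch content.) The *_members tests that follow all share one pattern: loop over CoordMetadata._members (the fields beyond the common BaseMetadata five, i.e. coord_system and climatological per test__fields above) and perturb exactly one member per iteration. Members are compared strictly even on the lenient path, which is what this sketch illustrates, assuming the behaviour asserted by test_op_lenient_different_members below:

    from unittest import mock
    from unittest.mock import sentinel

    from iris.common.metadata import CoordMetadata

    values = dict(
        standard_name=sentinel.standard_name,
        long_name=sentinel.long_name,
        var_name=sentinel.var_name,
        units=sentinel.units,
        attributes=sentinel.attributes,
        coord_system=sentinel.coord_system,
        climatological=sentinel.climatological,
    )

    for member in CoordMetadata._members:
        # Perturb exactly one member; every other field stays identical.
        left = CoordMetadata(**values)
        right = CoordMetadata(**dict(values, **{member: sentinel.dummy}))
        with mock.patch("iris.common.metadata._LENIENT", return_value=True):
            # Members get no lenient treatment: the conflict is nulled
            # even on the lenient path.
            assert left.combine(right)._asdict()[member] is None
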
def test_op_lenient_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + coord_system=sentinel.coord_system, + climatological=sentinel.climatological, + ) + self.dummy = sentinel.dummy + self.cls = CoordMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__, + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def 
test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + member_value = getattr(lmetadata, member) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (member_value, None) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = (None, member_value) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = 
self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = CoordMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, 
mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py new file mode 100644 index 0000000000..1636f85189 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -0,0 +1,831 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.metadata.CubeMetadata`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata, CubeMetadata + + +def _make_metadata( + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + force_mapping=True, +): + if force_mapping: + if attributes is None: + attributes = {} + else: + attributes = dict(STASH=attributes) + + return CubeMetadata( + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=None, + attributes=attributes, + cell_methods=None, + ) + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.cell_methods = mock.sentinel.cell_methods + self.cls = CubeMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + cell_methods=self.cell_methods, + ) + fmt = ( + "CubeMetadata(standard_name={!r}, long_name={!r}, var_name={!r}, " + "units={!r}, attributes={!r}, cell_methods={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.cell_methods, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + "cell_methods", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test___eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + # Must be a mapping. 
+ attributes=dict(), + cell_methods=sentinel.cell_methods, + ) + self.dummy = sentinel.dummy + self.cls = CubeMetadata + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__, + ) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) 
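
(Reviewer sketch; not patch content.) Every __eq__ test in this class turns on a single switch: the module-level _LENIENT callable, patched to force either the lenient or the strict code path. A compact sketch of the two outcomes, assuming the semantics asserted above; the field values are illustrative, and attributes must be a mapping as noted in setUp:

    from unittest import mock

    from iris.common.metadata import CubeMetadata

    # Fields: standard_name, long_name, var_name, units, attributes,
    # cell_methods.
    left = CubeMetadata("air_pressure", None, None, "Pa", {}, ())
    right = left._replace(var_name="p")  # differ in var_name only

    with mock.patch("iris.common.metadata._LENIENT", return_value=True):
        assert left == right  # lenient: a one-sided var_name is forgiven

    with mock.patch("iris.common.metadata._LENIENT", return_value=False):
        assert not (left == right)  # strict: any field difference fails
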
+ + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = CubeMetadata + self.one = self.cls(1, 1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1, 1) + self.none = self.cls(1, 1, 1, None, 1, 1) + self.attributes_cm = self.cls(1, 1, 1, 1, 10, 10) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes_cell_methods(self): + result = self.one < self.attributes_cm + self.assertFalse(result) + result = self.attributes_cm < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + cell_methods=sentinel.cell_methods, + ) + self.dummy = sentinel.dummy + self.cls = CubeMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__, + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata 
= self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["cell_methods"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["cell_methods"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None +
rmetadata = self.cls(**right) + expected = self.values.copy() + expected["cell_methods"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + cell_methods=sentinel.cell_methods, + ) + self.dummy = sentinel.dummy + self.cls = CubeMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__, + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = (sentinel.cell_methods, None) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = (None, sentinel.cell_methods) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = 
deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_cell_methods(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = ( + left["cell_methods"], + right["cell_methods"], + ) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = lexpected["cell_methods"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_cell_methods(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = ( + left["cell_methods"], + right["cell_methods"], + ) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = lexpected["cell_methods"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_cell_methods_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = ( + left["cell_methods"], + 
right["cell_methods"], + ) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = lexpected["cell_methods"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = CubeMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + +class Test_name(tests.IrisTest): + def setUp(self): + self.default = CubeMetadata.DEFAULT_NAME + + def test_standard_name(self): + token = "standard_name" + metadata = _make_metadata(standard_name=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, token) + + def test_standard_name__invalid_token(self): + token = "nope nope" + metadata = _make_metadata(standard_name=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, self.default) + + def test_long_name(self): + token = "long_name" + metadata = _make_metadata(long_name=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, token) + + def test_long_name__invalid_token(self): + token = "nope nope" + metadata = _make_metadata(long_name=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, self.default) + + def test_var_name(self): + token = "var_name" + metadata = _make_metadata(var_name=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, token) + + def test_var_name__invalid_token(self): + token = "nope nope" + metadata = _make_metadata(var_name=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, self.default) + + def test_attributes(self): + token = "stash" + metadata = _make_metadata(attributes=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, token) + + def test_attributes__invalid_token(self): + token = 
"nope nope" + metadata = _make_metadata(attributes=token) + result = metadata.name() + self.assertEqual(result, token) + result = metadata.name(token=True) + self.assertEqual(result, self.default) + + def test_attributes__non_mapping(self): + metadata = _make_metadata(force_mapping=False) + self.assertIsNone(metadata.attributes) + emsg = "Invalid 'CubeMetadata.attributes' member, must be a mapping." + with self.assertRaisesRegex(AttributeError, emsg): + _ = metadata.name() + + def test_default(self): + metadata = _make_metadata() + result = metadata.name() + self.assertEqual(result, self.default) + result = metadata.name(token=True) + self.assertEqual(result, self.default) + + def test_default__invalid_token(self): + token = "nope nope" + metadata = _make_metadata() + result = metadata.name(default=token) + self.assertEqual(result, token) + emsg = "Cannot retrieve a valid name token" + with self.assertRaisesRegex(ValueError, emsg): + _ = metadata.name(default=token, token=True) + + +class Test__names(tests.IrisTest): + def test_standard_name(self): + token = "standard_name" + metadata = _make_metadata(standard_name=token) + expected = (token, None, None, None) + result = metadata._names + self.assertEqual(expected, result) + + def test_long_name(self): + token = "long_name" + metadata = _make_metadata(long_name=token) + expected = (None, token, None, None) + result = metadata._names + self.assertEqual(expected, result) + + def test_var_name(self): + token = "var_name" + metadata = _make_metadata(var_name=token) + expected = (None, None, token, None) + result = metadata._names + self.assertEqual(expected, result) + + def test_attributes(self): + token = "stash" + metadata = _make_metadata(attributes=token) + expected = (None, None, None, token) + result = metadata._names + self.assertEqual(expected, result) + + def test_attributes__non_mapping(self): + metadata = _make_metadata(force_mapping=False) + self.assertIsNone(metadata.attributes) + emsg = "Invalid 'CubeMetadata.attributes' member, must be a mapping." + with self.assertRaisesRegex(AttributeError, emsg): + _ = metadata._names + + def test_None(self): + metadata = _make_metadata() + expected = (None, None, None, None) + result = metadata._names + self.assertEqual(expected, result) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py new file mode 100644 index 0000000000..72b3c1bc8f --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -0,0 +1,148 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + + from abc import abstractmethod + + from iris.common.metadata import _NamedTupleMeta + + + class Test(tests.IrisTest): + @staticmethod + def names(classes): + return [cls.__name__ for cls in classes] + + @staticmethod + def emsg_generate(members): + if isinstance(members, str): + members = (members,) + emsg = ".* missing {} required positional argument{}: {}" + args = ", ".join([f"{member!r}" for member in members[:-1]]) + count = len(members) + if count == 1: + args += f"{members[-1]!r}" + elif count == 2: + args += f" and {members[-1]!r}" + else: + args += f", and {members[-1]!r}" + plural = "s" if count > 1 else "" + return emsg.format(len(members), plural, args) + + def test__no_bases_with_abstract_members_property(self): + class Metadata(metaclass=_NamedTupleMeta): + @property + @abstractmethod + def _members(self): + pass + + expected = ["object"] + self.assertEqual(self.names(Metadata.__bases__), expected) + expected = ["Metadata", "object"] + self.assertEqual(self.names(Metadata.__mro__), expected) + emsg = ( + "Can't instantiate abstract class .* with abstract " + "methods _members" + ) + with self.assertRaisesRegex(TypeError, emsg): + _ = Metadata() + + def test__no_bases_single_member(self): + member = "arg_one" + + class Metadata(metaclass=_NamedTupleMeta): + _members = member + + expected = ["MetadataNamedtuple"] + self.assertEqual(self.names(Metadata.__bases__), expected) + expected = ["Metadata", "MetadataNamedtuple", "tuple", "object"] + self.assertEqual(self.names(Metadata.__mro__), expected) + emsg = self.emsg_generate(member) + with self.assertRaisesRegex(TypeError, emsg): + _ = Metadata() + metadata = Metadata(1) + self.assertEqual(metadata._fields, (member,)) + self.assertEqual(metadata.arg_one, 1) + + def test__no_bases_multiple_members(self): + members = ("arg_one", "arg_two") + + class Metadata(metaclass=_NamedTupleMeta): + _members = members + + expected = ["MetadataNamedtuple"] + self.assertEqual(self.names(Metadata.__bases__), expected) + expected = ["Metadata", "MetadataNamedtuple", "tuple", "object"] + self.assertEqual(self.names(Metadata.__mro__), expected) + emsg = self.emsg_generate(members) + with self.assertRaisesRegex(TypeError, emsg): + _ = Metadata() + values = range(len(members)) + metadata = Metadata(*values) + self.assertEqual(metadata._fields, members) + expected = dict(zip(members, values)) + self.assertEqual(metadata._asdict(), expected) + + def test__multiple_bases_multiple_members(self): + members_parent = ("arg_one", "arg_two") + members_child = ("arg_three", "arg_four") + + class MetadataParent(metaclass=_NamedTupleMeta): + _members = members_parent + + class MetadataChild(MetadataParent): + _members = members_child + + # Check the parent class... + expected = ["MetadataParentNamedtuple"] + self.assertEqual(self.names(MetadataParent.__bases__), expected) + expected = [ + "MetadataParent", + "MetadataParentNamedtuple", + "tuple", + "object", + ] + self.assertEqual(self.names(MetadataParent.__mro__), expected) + emsg = self.emsg_generate(members_parent) + with self.assertRaisesRegex(TypeError, emsg): + _ = MetadataParent() + values_parent = range(len(members_parent)) + metadata_parent = MetadataParent(*values_parent) + self.assertEqual(metadata_parent._fields, members_parent) + expected = dict(zip(members_parent, values_parent)) + self.assertEqual(metadata_parent._asdict(), expected) + + # Check the dependent child class... 
+ expected = ["MetadataChildNamedtuple", "MetadataParent"] + self.assertEqual(self.names(MetadataChild.__bases__), expected) + expected = [ + "MetadataChild", + "MetadataChildNamedtuple", + "MetadataParent", + "MetadataParentNamedtuple", + "tuple", + "object", + ] + self.assertEqual(self.names(MetadataChild.__mro__), expected) + emsg = self.emsg_generate((*members_parent, *members_child)) + with self.assertRaisesRegex(TypeError, emsg): + _ = MetadataChild() + fields_child = (*members_parent, *members_child) + values_child = range(len(fields_child)) + metadata_child = MetadataChild(*values_child) + self.assertEqual(metadata_child._fields, fields_child) + expected = dict(zip(fields_child, values_child)) + self.assertEqual(metadata_child._asdict(), expected) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test__hexdigest.py b/lib/iris/tests/unit/common/metadata/test__hexdigest.py new file mode 100644 index 0000000000..798f71bcd0 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test__hexdigest.py @@ -0,0 +1,179 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :func:`iris.common.metadata._hexdigest`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from unittest import mock + +import numpy.ma as ma +import numpy as np +from xxhash import xxh64, xxh64_hexdigest + +from iris.common.metadata import _hexdigest as hexdigest + + +class TestBytesLikeObject(tests.IrisTest): + def setUp(self): + self.hasher = xxh64() + self.hasher.reset() + + @staticmethod + def _ndarray(value): + parts = str((value.shape, xxh64_hexdigest(value))) + return xxh64_hexdigest(parts) + + @staticmethod + def _masked(value): + parts = str( + ( + value.shape, + xxh64_hexdigest(value.data), + xxh64_hexdigest(value.mask), + ) + ) + return xxh64_hexdigest(parts) + + def test_string(self): + value = "hello world" + self.hasher.update(value) + expected = self.hasher.hexdigest() + self.assertEqual(expected, hexdigest(value)) + + def test_numpy_array_int(self): + value = np.arange(10, dtype=np.int) + expected = self._ndarray(value) + self.assertEqual(expected, hexdigest(value)) + + def test_numpy_array_float(self): + value = np.arange(10, dtype=np.float) + expected = self._ndarray(value) + self.assertEqual(expected, hexdigest(value)) + + def test_numpy_array_float_not_int(self): + ivalue = np.arange(10, dtype=np.int) + fvalue = np.arange(10, dtype=np.float) + expected = self._ndarray(ivalue) + self.assertNotEqual(expected, hexdigest(fvalue)) + + def test_numpy_array_reshape(self): + value = np.arange(10).reshape(2, 5) + expected = self._ndarray(value) + self.assertEqual(expected, hexdigest(value)) + + def test_numpy_array_reshape_not_flat(self): + value = np.arange(10).reshape(2, 5) + expected = self._ndarray(value) + self.assertNotEqual(expected, hexdigest(value.flatten())) + + def test_masked_array_int(self): + value = ma.arange(10, dtype=np.int) + expected = self._masked(value) + self.assertEqual(expected, hexdigest(value)) + + value[0] = ma.masked + self.assertNotEqual(expected, hexdigest(value)) + expected = self._masked(value) + self.assertEqual(expected, hexdigest(value)) + + def test_masked_array_float(self): + value = ma.arange(10, dtype=np.float) + expected = self._masked(value) + self.assertEqual(expected, 
hexdigest(value)) + + value[0] = ma.masked + self.assertNotEqual(expected, hexdigest(value)) + expected = self._masked(value) + self.assertEqual(expected, hexdigest(value)) + + def test_masked_array_float_not_int(self): + ivalue = ma.arange(10, dtype=np.int) + fvalue = ma.arange(10, dtype=np.float) + expected = self._masked(ivalue) + self.assertNotEqual(expected, hexdigest(fvalue)) + + def test_masked_array_not_array(self): + value = ma.arange(10) + expected = self._masked(value) + self.assertNotEqual(expected, hexdigest(value.data)) + + def test_masked_array_reshape(self): + value = ma.arange(10).reshape(2, 5) + expected = self._masked(value) + self.assertEqual(expected, hexdigest(value)) + + def test_masked_array_reshape_not_flat(self): + value = ma.arange(10).reshape(2, 5) + expected = self._masked(value) + self.assertNotEqual(expected, hexdigest(value.flatten())) + + +class TestNotBytesLikeObject(tests.IrisTest): + def _expected(self, value): + parts = str((type(value), value)) + return xxh64_hexdigest(parts) + + def test_int(self): + value = 123 + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_numpy_int(self): + value = np.int(123) + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_float(self): + value = 123.4 + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_numpy_float(self): + value = np.float(123.4) + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_list(self): + value = [1, 2, 3] + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_tuple(self): + value = (1, 2, 3) + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_dict(self): + value = dict(one=1, two=2, three=3) + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_sentinel(self): + value = mock.sentinel.value + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_instance(self): + class Dummy: + pass + + value = Dummy() + expected = self._expected(value) + self.assertEqual(expected, hexdigest(value)) + + def test_int_not_str(self): + value = 123 + expected = self._expected(value) + self.assertNotEqual(expected, hexdigest(str(value))) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py new file mode 100644 index 0000000000..6678aca446 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -0,0 +1,210 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +import pickle +import unittest.mock as mock + +from cf_units import Unit + +from iris.common.metadata import ( + AncillaryVariableMetadata, + BaseMetadata, + CellMeasureMetadata, + CoordMetadata, + CubeMetadata, + metadata_manager_factory, +) + + +BASES = [ + AncillaryVariableMetadata, + BaseMetadata, + CellMeasureMetadata, + CoordMetadata, + CubeMetadata, +] + + +class Test_factory(tests.IrisTest): + def test__subclass_invalid(self): + class Other: + pass + + emsg = "Require a subclass of 'BaseMetadata'" + with self.assertRaisesRegex(TypeError, emsg): + _ = metadata_manager_factory(Other) + + def test__kwargs_invalid(self): + emsg = "Invalid 'BaseMetadata' field parameters, got 'wibble'." + with self.assertRaisesRegex(ValueError, emsg): + metadata_manager_factory(BaseMetadata, wibble="nope") + + +class Test_instance(tests.IrisTest): + def setUp(self): + self.bases = BASES + + def test__namespace(self): + namespace = [ + "DEFAULT_NAME", + "__init__", + "__eq__", + "__getstate__", + "__ne__", + "__reduce__", + "__repr__", + "__setstate__", + "fields", + "name", + "token", + "values", + ] + for base in self.bases: + metadata = metadata_manager_factory(base) + for name in namespace: + self.assertTrue(hasattr(metadata, name)) + if base is CubeMetadata: + self.assertTrue(hasattr(metadata, "_names")) + self.assertIs(metadata.cls, base) + + def test__kwargs_default(self): + for base in self.bases: + kwargs = dict(zip(base._fields, [None] * len(base._fields))) + metadata = metadata_manager_factory(base) + self.assertEqual(metadata.values._asdict(), kwargs) + + def test__kwargs(self): + for base in self.bases: + kwargs = dict(zip(base._fields, range(len(base._fields)))) + metadata = metadata_manager_factory(base, **kwargs) + self.assertEqual(metadata.values._asdict(), kwargs) + + +class Test_instance___eq__(tests.IrisTest): + def setUp(self): + self.metadata = metadata_manager_factory(BaseMetadata) + + def test__not_implemented(self): + self.assertNotEqual(self.metadata, 1) + + def test__not_is_cls(self): + base = BaseMetadata + other = metadata_manager_factory(base) + self.assertIs(other.cls, base) + other.cls = CoordMetadata + self.assertNotEqual(self.metadata, other) + + def test__not_values(self): + standard_name = mock.sentinel.standard_name + other = metadata_manager_factory( + BaseMetadata, standard_name=standard_name + ) + self.assertEqual(other.standard_name, standard_name) + self.assertIsNone(other.long_name) + self.assertIsNone(other.var_name) + self.assertIsNone(other.units) + self.assertIsNone(other.attributes) + self.assertNotEqual(self.metadata, other) + + def test__same_default(self): + other = metadata_manager_factory(BaseMetadata) + self.assertEqual(self.metadata, other) + + def test__same(self): + kwargs = dict( + standard_name=1, long_name=2, var_name=3, units=4, attributes=5 + ) + metadata = metadata_manager_factory(BaseMetadata, **kwargs) + other = metadata_manager_factory(BaseMetadata, **kwargs) + self.assertEqual(metadata.values._asdict(), kwargs) + self.assertEqual(metadata, other) + + +class Test_instance____repr__(tests.IrisTest): + def setUp(self): + self.metadata = metadata_manager_factory(BaseMetadata) + + def test(self): + standard_name = mock.sentinel.standard_name + long_name = mock.sentinel.long_name + var_name = mock.sentinel.var_name + units = mock.sentinel.units + attributes = mock.sentinel.attributes + values = (standard_name, long_name, var_name, units, attributes) + + for field, value in zip(self.metadata.fields, values): + 
setattr(self.metadata, field, value) + + result = repr(self.metadata) + expected = ( + "MetadataManager(standard_name={!r}, long_name={!r}, var_name={!r}, " + "units={!r}, attributes={!r})" + ) + self.assertEqual(result, expected.format(*values)) + + +class Test_instance__pickle(tests.IrisTest): + def setUp(self): + self.standard_name = "standard_name" + self.long_name = "long_name" + self.var_name = "var_name" + self.units = Unit("1") + self.attributes = dict(hello="world") + values = ( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + ) + self.kwargs = dict(zip(BaseMetadata._fields, values)) + self.metadata = metadata_manager_factory(BaseMetadata, **self.kwargs) + + def test_pickle(self): + for protocol in range(pickle.HIGHEST_PROTOCOL + 1): + with self.temp_filename(suffix=".pkl") as fname: + with open(fname, "wb") as fo: + pickle.dump(self.metadata, fo, protocol=protocol) + with open(fname, "rb") as fi: + metadata = pickle.load(fi) + self.assertEqual(metadata, self.metadata) + + +class Test_instance__fields(tests.IrisTest): + def setUp(self): + self.bases = BASES + + def test(self): + for base in self.bases: + fields = base._fields + metadata = metadata_manager_factory(base) + self.assertEqual(metadata.fields, fields) + for field in fields: + self.assertTrue(hasattr(metadata, field)) + + +class Test_instance__values(tests.IrisTest): + def setUp(self): + self.bases = BASES + + def test(self): + for base in self.bases: + metadata = metadata_manager_factory(base) + result = metadata.values + self.assertIsInstance(result, base) + self.assertEqual(result._fields, base._fields) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/mixin/__init__.py b/lib/iris/tests/unit/common/mixin/__init__.py new file mode 100644 index 0000000000..493e140626 --- /dev/null +++ b/lib/iris/tests/unit/common/mixin/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :mod:`iris.common.mixin` package.""" diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py new file mode 100644 index 0000000000..5ac9361e4f --- /dev/null +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -0,0 +1,364 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +from collections import OrderedDict, namedtuple +from unittest import mock + +from cf_units import Unit + +from iris.common.metadata import ( + AncillaryVariableMetadata, + BaseMetadata, + CellMeasureMetadata, + CoordMetadata, + CubeMetadata, +) +from iris.common.mixin import CFVariableMixin, LimitedAttributeDict + + +class Test__getter(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.metadata = mock.sentinel.metadata + + metadata = mock.MagicMock( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + values=self.metadata, + ) + + self.item = CFVariableMixin() + self.item._metadata_manager = metadata + + def test_standard_name(self): + self.assertEqual(self.item.standard_name, self.standard_name) + + def test_long_name(self): + self.assertEqual(self.item.long_name, self.long_name) + + def test_var_name(self): + self.assertEqual(self.item.var_name, self.var_name) + + def test_units(self): + self.assertEqual(self.item.units, self.units) + + def test_attributes(self): + self.assertEqual(self.item.attributes, self.attributes) + + def test_metadata(self): + self.assertEqual(self.item.metadata, self.metadata) + + +class Test__setter(tests.IrisTest): + def setUp(self): + metadata = mock.MagicMock( + standard_name=mock.sentinel.standard_name, + long_name=mock.sentinel.long_name, + var_name=mock.sentinel.var_name, + units=mock.sentinel.units, + attributes=mock.sentinel.attributes, + token=lambda name: name, + ) + + self.item = CFVariableMixin() + self.item._metadata_manager = metadata + + def test_standard_name__valid(self): + standard_name = "air_temperature" + self.item.standard_name = standard_name + self.assertEqual( + self.item._metadata_manager.standard_name, standard_name + ) + + def test_standard_name__none(self): + self.item.standard_name = None + self.assertIsNone(self.item._metadata_manager.standard_name) + + def test_standard_name__invalid(self): + standard_name = "nope nope" + emsg = f"{standard_name!r} is not a valid standard_name" + with self.assertRaisesRegex(ValueError, emsg): + self.item.standard_name = standard_name + + def test_long_name(self): + long_name = "long_name" + self.item.long_name = long_name + self.assertEqual(self.item._metadata_manager.long_name, long_name) + + def test_long_name__none(self): + self.item.long_name = None + self.assertIsNone(self.item._metadata_manager.long_name) + + def test_var_name(self): + var_name = "var_name" + self.item.var_name = var_name + self.assertEqual(self.item._metadata_manager.var_name, var_name) + + def test_var_name__none(self): + self.item.var_name = None + self.assertIsNone(self.item._metadata_manager.var_name) + + def test_var_name__invalid_token(self): + var_name = "nope nope" + self.item._metadata_manager.token = lambda name: None + emsg = f"{var_name!r} is not a valid NetCDF variable name." 
+ with self.assertRaisesRegex(ValueError, emsg): + self.item.var_name = var_name + + def test_attributes(self): + attributes = dict(hello="world") + self.item.attributes = attributes + self.assertEqual(self.item._metadata_manager.attributes, attributes) + self.assertIsNot(self.item._metadata_manager.attributes, attributes) + self.assertIsInstance( + self.item._metadata_manager.attributes, LimitedAttributeDict + ) + + def test_attributes__none(self): + self.item.attributes = None + self.assertEqual(self.item._metadata_manager.attributes, {}) + + +class Test__metadata_setter(tests.IrisTest): + def setUp(self): + class Metadata: + def __init__(self): + self.cls = BaseMetadata + self.fields = BaseMetadata._fields + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.token = lambda name: name + + @property + def values(self): + return dict( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + ) + + metadata = Metadata() + self.item = CFVariableMixin() + self.item._metadata_manager = metadata + self.attributes = dict(one=1, two=2, three=3) + self.args = OrderedDict( + standard_name="air_temperature", + long_name="long_name", + var_name="var_name", + units=Unit("1"), + attributes=self.attributes, + ) + + def test_dict(self): + metadata = dict(**self.args) + self.item.metadata = metadata + self.assertEqual(self.item._metadata_manager.values, metadata) + self.assertIsNot( + self.item._metadata_manager.attributes, self.attributes + ) + + def test_dict__partial(self): + metadata = dict(**self.args) + del metadata["standard_name"] + self.item.metadata = metadata + metadata["standard_name"] = mock.sentinel.standard_name + self.assertEqual(self.item._metadata_manager.values, metadata) + self.assertIsNot( + self.item._metadata_manager.attributes, self.attributes + ) + + def test_ordereddict(self): + metadata = self.args + self.item.metadata = metadata + self.assertEqual(self.item._metadata_manager.values, metadata) + self.assertIsNot( + self.item._metadata_manager.attributes, self.attributes + ) + + def test_ordereddict__partial(self): + metadata = self.args + del metadata["long_name"] + del metadata["units"] + self.item.metadata = metadata + metadata["long_name"] = mock.sentinel.long_name + metadata["units"] = mock.sentinel.units + self.assertEqual(self.item._metadata_manager.values, metadata) + + def test_tuple(self): + metadata = tuple(self.args.values()) + self.item.metadata = metadata + result = tuple( + [ + getattr(self.item._metadata_manager, field) + for field in self.item._metadata_manager.fields + ] + ) + self.assertEqual(result, metadata) + self.assertIsNot( + self.item._metadata_manager.attributes, self.attributes + ) + + def test_tuple__missing(self): + metadata = list(self.args.values()) + del metadata[2] + emsg = "Invalid .* metadata, require .* to be specified." 
+ with self.assertRaisesRegex(TypeError, emsg): + self.item.metadata = tuple(metadata) + + def test_namedtuple(self): + Metadata = namedtuple( + "Metadata", + ("standard_name", "long_name", "var_name", "units", "attributes"), + ) + metadata = Metadata(**self.args) + self.item.metadata = metadata + self.assertEqual( + self.item._metadata_manager.values, metadata._asdict() + ) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + + def test_namedtuple__partial(self): + Metadata = namedtuple( + "Metadata", ("standard_name", "long_name", "var_name", "units") + ) + del self.args["attributes"] + metadata = Metadata(**self.args) + self.item.metadata = metadata + expected = metadata._asdict() + expected.update(dict(attributes=mock.sentinel.attributes)) + self.assertEqual(self.item._metadata_manager.values, expected) + + def test_class_ancillaryvariablemetadata(self): + metadata = AncillaryVariableMetadata(**self.args) + self.item.metadata = metadata + self.assertEqual( + self.item._metadata_manager.values, metadata._asdict() + ) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + + def test_class_basemetadata(self): + metadata = BaseMetadata(**self.args) + self.item.metadata = metadata + self.assertEqual( + self.item._metadata_manager.values, metadata._asdict() + ) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + + def test_class_cellmeasuremetadata(self): + self.args["measure"] = None + metadata = CellMeasureMetadata(**self.args) + self.item.metadata = metadata + expected = metadata._asdict() + del expected["measure"] + self.assertEqual(self.item._metadata_manager.values, expected) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + + def test_class_coordmetadata(self): + self.args.update(dict(coord_system=None, climatological=False)) + metadata = CoordMetadata(**self.args) + self.item.metadata = metadata + expected = metadata._asdict() + del expected["coord_system"] + del expected["climatological"] + self.assertEqual(self.item._metadata_manager.values, expected) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + + def test_class_cubemetadata(self): + self.args["cell_methods"] = None + metadata = CubeMetadata(**self.args) + self.item.metadata = metadata + expected = metadata._asdict() + del expected["cell_methods"] + self.assertEqual(self.item._metadata_manager.values, expected) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + + +class Test_rename(tests.IrisTest): + def setUp(self): + metadata = mock.MagicMock( + standard_name=mock.sentinel.standard_name, + long_name=mock.sentinel.long_name, + var_name=mock.sentinel.var_name, + units=mock.sentinel.units, + attributes=mock.sentinel.attributes, + values=mock.sentinel.metadata, + token=lambda name: name, + ) + + self.item = CFVariableMixin() + self.item._metadata_manager = metadata + + def test__valid_standard_name(self): + name = "air_temperature" + self.item.rename(name) + self.assertEqual(self.item._metadata_manager.standard_name, name) + self.assertIsNone(self.item._metadata_manager.long_name) + self.assertIsNone(self.item._metadata_manager.var_name) + + def test__invalid_standard_name(self): + name = "nope nope" + self.item.rename(name) + self.assertIsNone(self.item._metadata_manager.standard_name) + self.assertEqual(self.item._metadata_manager.long_name, name) + self.assertIsNone(self.item._metadata_manager.var_name) + + 
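The two rename() cases above pin down the fallback rule: a name that passes CF standard-name validation is stored as standard_name, while an unrecognised one lands in long_name instead, with the other name slots cleared. A minimal sketch of that behaviour on a concrete coordinate (illustrative only, assuming the public iris.coords API used elsewhere in this diff):

    from iris.coords import AuxCoord

    coord = AuxCoord([1.0])
    coord.rename("air_temperature")  # valid CF standard name
    # coord.standard_name == "air_temperature"; coord.long_name is None

    coord.rename("nope nope")  # fails validation, so falls back
    # coord.standard_name is None; coord.long_name == "nope nope"
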
+class Test_name(tests.IrisTest): + def setUp(self): + class Metadata: + def __init__(self, name): + self.name = mock.MagicMock(return_value=name) + + self.name = mock.sentinel.name + metadata = Metadata(self.name) + + self.item = CFVariableMixin() + self.item._metadata_manager = metadata + + def test(self): + default = mock.sentinel.default + token = mock.sentinel.token + result = self.item.name(default=default, token=token) + self.assertEqual(result, self.name) + self.item._metadata_manager.name.assert_called_with( + default=default, token=token + ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py new file mode 100644 index 0000000000..bfaeae2daf --- /dev/null +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -0,0 +1,69 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from unittest import mock +import numpy as np + +from iris.common.mixin import LimitedAttributeDict + + +class Test(tests.IrisTest): + def setUp(self): + self.forbidden_keys = LimitedAttributeDict._forbidden_keys + self.emsg = "{!r} is not a permitted attribute" + + def test__invalid_keys(self): + for key in self.forbidden_keys: + with self.assertRaisesRegex(ValueError, self.emsg.format(key)): + _ = LimitedAttributeDict(**{key: None}) + + def test___eq__(self): + values = dict( + one=mock.sentinel.one, + two=mock.sentinel.two, + three=mock.sentinel.three, + ) + left = LimitedAttributeDict(**values) + right = LimitedAttributeDict(**values) + self.assertEqual(left, right) + self.assertEqual(left, values) + + def test___eq___numpy(self): + values = dict(one=np.arange(1), two=np.arange(2), three=np.arange(3),) + left = LimitedAttributeDict(**values) + right = LimitedAttributeDict(**values) + self.assertEqual(left, right) + self.assertEqual(left, values) + values = dict(one=np.arange(1), two=np.arange(1), three=np.arange(1),) + left = LimitedAttributeDict(dict(one=0, two=0, three=0)) + right = LimitedAttributeDict(**values) + self.assertEqual(left, right) + self.assertEqual(left, values) + + def test___setitem__(self): + for key in self.forbidden_keys: + item = LimitedAttributeDict() + with self.assertRaisesRegex(ValueError, self.emsg.format(key)): + item[key] = None + + def test_update(self): + for key in self.forbidden_keys: + item = LimitedAttributeDict() + with self.assertRaisesRegex(ValueError, self.emsg.format(key)): + other = {key: None} + item.update(other) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/cube_coord_common/test_get_valid_standard_name.py b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py similarity index 70% rename from lib/iris/tests/unit/cube_coord_common/test_get_valid_standard_name.py rename to lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py index ae084f33e4..6d6dcb182e 100644 --- a/lib/iris/tests/unit/cube_coord_common/test_get_valid_standard_name.py +++ b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py @@ -4,7 +4,7 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing 
details. """ -Unit tests for the :func:`iris._cube_coord_common.get_valid_standard_name`. +Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. """ @@ -12,7 +12,7 @@ # importing anything else. import iris.tests as tests -from iris._cube_coord_common import get_valid_standard_name +from iris.common.mixin import _get_valid_standard_name class Test(tests.IrisTest): @@ -21,51 +21,51 @@ def setUp(self): def test_pass_thru_none(self): name = None - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_pass_thru_empty(self): name = "" - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_pass_thru_whitespace(self): name = " " - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_valid_standard_name(self): name = "air_temperature" - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_standard_name_alias(self): name = "atmosphere_optical_thickness_due_to_pm1_ambient_aerosol" - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_invalid_standard_name(self): name = "not_a_standard_name" with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - get_valid_standard_name(name) + _get_valid_standard_name(name) def test_valid_standard_name_valid_modifier(self): name = "air_temperature standard_error" - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_valid_standard_name_valid_modifier_extra_spaces(self): name = "air_temperature standard_error" - self.assertEqual(get_valid_standard_name(name), name) + self.assertEqual(_get_valid_standard_name(name), name) def test_invalid_standard_name_valid_modifier(self): name = "not_a_standard_name standard_error" with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - get_valid_standard_name(name) + _get_valid_standard_name(name) def test_valid_standard_invalid_name_modifier(self): name = "air_temperature extra_names standard_error" with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - get_valid_standard_name(name) + _get_valid_standard_name(name) def test_valid_standard_valid_name_modifier_extra_names(self): name = "air_temperature standard_error extra words" with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - get_valid_standard_name(name) + _get_valid_standard_name(name) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index 6965ea7a2f..9b101362a5 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -70,7 +70,7 @@ def test_basic(self): cube = self.cube time_coord = self.time_coord expected_coord = iris.coords.AuxCoord( - self.hour_numbers % 24, long_name=coord_name + self.hour_numbers % 24, long_name=coord_name, units="1" ) ccat.add_hour(cube, time_coord, coord_name) diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py index 88906dd905..530c39cf6d 100644 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -11,7 +11,7 @@ # importing anything else. 
import iris.tests as tests -from iris._cube_coord_common import CFVariableMixin +from iris.common import BaseMetadata from iris.coords import CellMethod, AuxCoord @@ -21,7 +21,7 @@ def setUp(self): def _check(self, token, coord, default=False): result = CellMethod(self.method, coords=coord) - token = token if not default else CFVariableMixin._DEFAULT_NAME + token = token if not default else BaseMetadata.DEFAULT_NAME expected = "{}: {}".format(self.method, token) self.assertEqual(str(result), expected) @@ -54,7 +54,7 @@ def test_coord_var_name_fail(self): def test_coord_stash(self): token = "stash" coord = AuxCoord(1, attributes=dict(STASH=token)) - self._check(token, coord) + self._check(token, coord, default=True) def test_coord_stash_default(self): token = "_stash" # includes leading underscore @@ -84,7 +84,7 @@ def test_mixture_default(self): token = "air temperature" # includes space coord = AuxCoord(1, long_name=token) result = CellMethod(self.method, coords=[coord, token]) - expected = "{}: unknown, unknown".format(self.method, token, token) + expected = "{}: unknown, unknown".format(self.method) self.assertEqual(str(result), expected) diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index b3fdd215d6..b7fa7a5ce7 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -1010,6 +1010,17 @@ def test_remove_bounds(self): coord.bounds = None self.assertFalse(coord.climatological) + def test_change_units(self): + coord = AuxCoord( + points=[0, 1], + bounds=[[0, 1], [1, 2]], + units="days since 1970-01-01", + climatological=True, + ) + self.assertTrue(coord.climatological) + coord.units = "K" + self.assertFalse(coord.climatological) + class Test___init____abstractmethod(tests.IrisTest): def test(self): diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 9c03f0f4d4..3b98be6454 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -276,7 +276,7 @@ def test_byteorder_true(self): def test_cell_measures(self): cube = stock.simple_3d_w_multidim_coords() cm_a = iris.coords.CellMeasure( - np.zeros(cube.shape[-2:]), measure="area" + np.zeros(cube.shape[-2:]), measure="area", units="1" ) cube.add_cell_measure(cm_a, (1, 2)) cm_v = iris.coords.CellMeasure( @@ -1077,7 +1077,10 @@ def create_cube(lon_min, lon_max, bounds=False): 0, ) cube.add_aux_coord( - iris.coords.AuxCoord([1.0, 0.9, 0.8, 0.6], long_name="sigma"), 0 + iris.coords.AuxCoord( + [1.0, 0.9, 0.8, 0.6], long_name="sigma", units="1" + ), + 0, ) cube.add_dim_coord( iris.coords.DimCoord([-45, 0, 45], "latitude", units="degrees"), 1 diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 985c5b6576..2e7b110d60 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -151,16 +151,18 @@ class Test_merge__time_triple(tests.IrisTest): @staticmethod def _make_cube(fp, rt, t, realization=None): cube = Cube(np.arange(20).reshape(4, 5)) - cube.add_dim_coord(DimCoord(np.arange(5), long_name="x"), 1) - cube.add_dim_coord(DimCoord(np.arange(4), long_name="y"), 0) - cube.add_aux_coord(DimCoord(fp, standard_name="forecast_period")) + cube.add_dim_coord(DimCoord(np.arange(5), long_name="x", units="1"), 1) + cube.add_dim_coord(DimCoord(np.arange(4), long_name="y", units="1"), 0) cube.add_aux_coord( - DimCoord(rt, standard_name="forecast_reference_time") + DimCoord(fp, 
standard_name="forecast_period", units="1") ) - cube.add_aux_coord(DimCoord(t, standard_name="time")) + cube.add_aux_coord( + DimCoord(rt, standard_name="forecast_reference_time", units="1") + ) + cube.add_aux_coord(DimCoord(t, standard_name="time", units="1")) if realization is not None: cube.add_aux_coord( - DimCoord(realization, standard_name="realization") + DimCoord(realization, standard_name="realization", units="1") ) return cube diff --git a/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py b/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py deleted file mode 100644 index 0f08d397cb..0000000000 --- a/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris._cube_coord_common.CFVariableMixin`. -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from iris._cube_coord_common import CFVariableMixin - - -class Test_token(tests.IrisTest): - def test_passthru_None(self): - result = CFVariableMixin.token(None) - self.assertIsNone(result) - - def test_fail_leading_underscore(self): - result = CFVariableMixin.token("_nope") - self.assertIsNone(result) - - def test_fail_leading_dot(self): - result = CFVariableMixin.token(".nope") - self.assertIsNone(result) - - def test_fail_leading_plus(self): - result = CFVariableMixin.token("+nope") - self.assertIsNone(result) - - def test_fail_leading_at(self): - result = CFVariableMixin.token("@nope") - self.assertIsNone(result) - - def test_fail_space(self): - result = CFVariableMixin.token("nope nope") - self.assertIsNone(result) - - def test_fail_colon(self): - result = CFVariableMixin.token("nope:") - self.assertIsNone(result) - - def test_pass_simple(self): - token = "simple" - result = CFVariableMixin.token(token) - self.assertEqual(result, token) - - def test_pass_leading_digit(self): - token = "123simple" - result = CFVariableMixin.token(token) - self.assertEqual(result, token) - - def test_pass_mixture(self): - token = "S.imple@one+two_3" - result = CFVariableMixin.token(token) - self.assertEqual(result, token) - - -class Test_name(tests.IrisTest): - def setUp(self): - # None token CFVariableMixin - self.cf_var = CFVariableMixin() - self.cf_var.standard_name = None - self.cf_var.long_name = None - self.cf_var.var_name = None - self.cf_var.attributes = {} - self.default = CFVariableMixin._DEFAULT_NAME - # bad token CFVariableMixin - self.cf_bad = CFVariableMixin() - self.cf_bad.standard_name = None - self.cf_bad.long_name = "nope nope" - self.cf_bad.var_name = None - self.cf_bad.attributes = {"STASH": "nope nope"} - - def test_standard_name(self): - token = "air_temperature" - self.cf_var.standard_name = token - result = self.cf_var.name() - self.assertEqual(result, token) - - def test_long_name(self): - token = "long_name" - self.cf_var.long_name = token - result = self.cf_var.name() - self.assertEqual(result, token) - - def test_var_name(self): - token = "var_name" - self.cf_var.var_name = token - result = self.cf_var.name() - self.assertEqual(result, token) - - def test_stash(self): - token = "stash" - self.cf_var.attributes["STASH"] = token - result = self.cf_var.name() - self.assertEqual(result, token) - - def test_default(self): - result = self.cf_var.name() - 
self.assertEqual(result, self.default) - - def test_token_long_name(self): - token = "long_name" - self.cf_bad.long_name = token - result = self.cf_bad.name(token=True) - self.assertEqual(result, token) - - def test_token_var_name(self): - token = "var_name" - self.cf_bad.var_name = token - result = self.cf_bad.name(token=True) - self.assertEqual(result, token) - - def test_token_stash(self): - token = "stash" - self.cf_bad.attributes["STASH"] = token - result = self.cf_bad.name(token=True) - self.assertEqual(result, token) - - def test_token_default(self): - result = self.cf_var.name(token=True) - self.assertEqual(result, self.default) - - def test_fail_token_default(self): - emsg = "Cannot retrieve a valid name token" - with self.assertRaisesRegex(ValueError, emsg): - self.cf_var.name(default="_nope", token=True) - - -class Test_names(tests.IrisTest): - def setUp(self): - self.cf_var = CFVariableMixin() - self.cf_var.standard_name = None - self.cf_var.long_name = None - self.cf_var.var_name = None - self.cf_var.attributes = dict() - - def test_standard_name(self): - standard_name = "air_temperature" - self.cf_var.standard_name = standard_name - expected = (standard_name, None, None, None) - result = self.cf_var.names - self.assertEqual(expected, result) - self.assertEqual(result.standard_name, standard_name) - - def test_long_name(self): - long_name = "air temperature" - self.cf_var.long_name = long_name - expected = (None, long_name, None, None) - result = self.cf_var.names - self.assertEqual(expected, result) - self.assertEqual(result.long_name, long_name) - - def test_var_name(self): - var_name = "atemp" - self.cf_var.var_name = var_name - expected = (None, None, var_name, None) - result = self.cf_var.names - self.assertEqual(expected, result) - self.assertEqual(result.var_name, var_name) - - def test_STASH(self): - stash = "m01s16i203" - self.cf_var.attributes = dict(STASH=stash) - expected = (None, None, None, stash) - result = self.cf_var.names - self.assertEqual(expected, result) - self.assertEqual(result.STASH, stash) - - def test_None(self): - expected = (None, None, None, None) - result = self.cf_var.names - self.assertEqual(expected, result) - - -class Test_standard_name__setter(tests.IrisTest): - def test_valid_standard_name(self): - cf_var = CFVariableMixin() - cf_var.standard_name = "air_temperature" - self.assertEqual(cf_var.standard_name, "air_temperature") - - def test_invalid_standard_name(self): - cf_var = CFVariableMixin() - emsg = "'not_a_standard_name' is not a valid standard_name" - with self.assertRaisesRegex(ValueError, emsg): - cf_var.standard_name = "not_a_standard_name" - - def test_none_standard_name(self): - cf_var = CFVariableMixin() - cf_var.standard_name = None - self.assertIsNone(cf_var.standard_name) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index 8746625f7e..aa8a363895 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -79,7 +79,10 @@ def test_static_level(self): def test_coord_input(self): source = AuxCoord(self.src_levels.data) - source.metadata = self.src_levels.metadata + metadata = self.src_levels.metadata._asdict() + metadata["coord_system"] = None + metadata["climatological"] = None + source.metadata = metadata for axis in self.axes: result = relevel(self.cube, source, [0, 12, 13], axis=axis) diff --git 
a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py index 08595ed3f3..acea552fdf 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py @@ -161,7 +161,7 @@ def _simple_cube(self, dtype): points = np.arange(3, dtype=dtype) bounds = np.arange(6, dtype=dtype).reshape(3, 2) cube = Cube(data, "air_pressure_anomaly") - coord = DimCoord(points, bounds=bounds) + coord = DimCoord(points, bounds=bounds, units="1") cube.add_dim_coord(coord, 0) return cube diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py index 3bbac6b309..609f7d097a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py @@ -23,7 +23,9 @@ class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest): def setUp(self): standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" self.requires = dict(formula_type=standard_name) - coordinates = [(mock.sentinel.b, "b"), (mock.sentinel.ps, "ps")] + self.ap = mock.MagicMock(units="units") + self.ps = mock.MagicMock(units="units") + coordinates = [(mock.sentinel.b, "b"), (self.ps, "ps")] self.provides = dict(coordinates=coordinates) self.engine = mock.Mock(requires=self.requires, provides=self.provides) self.cube = mock.create_autospec(Cube, spec_set=True, instance=True) @@ -34,7 +36,7 @@ def setUp(self): self.addCleanup(patcher.stop) def test_formula_terms_ap(self): - self.provides["coordinates"].append((mock.sentinel.ap, "ap")) + self.provides["coordinates"].append((self.ap, "ap")) self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) # Check cube.add_aux_coord method. 
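The substitution above (self.ap and self.ps in place of mock.sentinel objects) reflects that the aux-factory code now reads the units attribute of the coordinates it is given, and a bare sentinel carries no attributes at all. A rough sketch of the distinction (hypothetical unit value, not the Iris API itself):

    from unittest import mock

    ps = mock.sentinel.ps
    # ps.units  ->  AttributeError: sentinel objects have no attributes

    ps = mock.MagicMock(units="Pa")
    assert ps.units == "Pa"  # safe for code that inspects coordinate units
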
@@ -44,9 +46,9 @@ def test_formula_terms_ap(self): args, _ = self.cube.add_aux_factory.call_args self.assertEqual(len(args), 1) factory = args[0] - self.assertEqual(factory.delta, mock.sentinel.ap) + self.assertEqual(factory.delta, self.ap) self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, mock.sentinel.ps) + self.assertEqual(factory.surface_air_pressure, self.ps) def test_formula_terms_a_p0(self): coord_a = DimCoord(np.arange(5), units="Pa") @@ -78,7 +80,7 @@ def test_formula_terms_a_p0(self): factory = args[0] self.assertEqual(factory.delta, coord_expected) self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, mock.sentinel.ps) + self.assertEqual(factory.surface_air_pressure, self.ps) def test_formula_terms_p0_non_scalar(self): coord_p0 = DimCoord(np.arange(5)) @@ -113,7 +115,7 @@ def _check_no_delta(self): # Check that the factory has no delta term self.assertEqual(factory.delta, None) self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, mock.sentinel.ps) + self.assertEqual(factory.surface_air_pressure, self.ps) def test_formula_terms_ap_missing_coords(self): self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index d10c1218ab..d44b5a1d54 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -269,7 +269,7 @@ def test_month_coord(self): res = _all_other_rules(field)[AUX_COORDS_INDEX] expected = [ - (AuxCoord(3, long_name="month_number"), None), + (AuxCoord(3, long_name="month_number", units="1"), None), (AuxCoord("Mar", long_name="month", units=Unit("no unit")), None), ( DimCoord( diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py index 70807408d0..b7074f3c00 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py @@ -23,7 +23,8 @@ class Test(TestField): def test_valid(self): coords_and_dims = _convert_scalar_pseudo_level_coords(lbuser5=21) self.assertEqual( - coords_and_dims, [(DimCoord([21], long_name="pseudo_level"), None)] + coords_and_dims, + [(DimCoord([21], long_name="pseudo_level", units="1"), None)], ) def test_missing_indicator(self): diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py index 4a4649c978..929f65c921 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py @@ -24,7 +24,7 @@ def test_valid(self): coords_and_dims = _convert_scalar_realization_coords(lbrsvd4=21) self.assertEqual( coords_and_dims, - [(DimCoord([21], standard_name="realization"), None)], + [(DimCoord([21], standard_name="realization", units="1"), None)], ) def test_missing_indicator(self): diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py 
index b3a6e537ac..b9a652c397 100644
--- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py
+++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py
@@ -210,6 +210,7 @@ def _check_depth(
                     lblev,
                     standard_name="model_level_number",
                     attributes={"positive": "down"},
+                    units="1",
                 ),
                 dim,
             )
@@ -354,6 +355,7 @@ def _check_soil_level(
             lblev,
             long_name="soil_model_level_number",
             attributes={"positive": "down"},
+            units="1",
         )
         expect_result = [(coord, dim)]
         self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result)
@@ -604,6 +606,7 @@ def _check(
                     lblev,
                     standard_name="model_level_number",
                     attributes={"positive": "up"},
+                    units="1",
                 ),
                 dim,
             )
@@ -630,6 +633,7 @@ def _check(
                     blev,
                     long_name="sigma",
                     bounds=np.vstack((brlev, brsvd1)).T,
+                    units="1",
                 ),
                 dim,
             )
@@ -706,6 +710,7 @@ def _check(
                     lblev,
                     standard_name="model_level_number",
                     attributes={"positive": "up"},
+                    units="1",
                 ),
                 dim,
             )
@@ -732,6 +737,7 @@ def _check(
                     bhlev,
                     long_name="sigma",
                     bounds=np.vstack((bhrlev, brsvd2)).T,
+                    units="1",
                 ),
                 dim,
             )
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py
index 70d72fb133..8734d883cd 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py
@@ -68,7 +68,6 @@ def setUp(self):
         # Patch the deferred loading that prevents attempted file access.
         # This assumes that self.cf_bounds_var is defined in the test case.
         def patched__getitem__(proxy_self, keys):
-            variable = None
             for var in (self.cf_coord_var, self.cf_bounds_var):
                 if proxy_self.variable_name == var.cf_name:
                     return var[keys]
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py
index 2f02c71c9c..665beb8747 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py
@@ -15,8 +15,6 @@
 from unittest import mock
 
-import numpy as np
-
 import iris
 from iris.coord_systems import Mercator
 from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
     build_mercator_coordinate_system
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py
index 8912614f96..e95f286a8d 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py
@@ -15,8 +15,6 @@
 from unittest import mock
 
-import numpy as np
-
 import iris
 from iris.coord_systems import Stereographic
 from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
     build_stereographic_coordinate_system
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py
index c5e36e8d8e..b752de2370 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py
@@ -44,7 +44,7 @@ def test_unicode_character(self):
         expected_attributes = {'invalid_units': u'\u266b'}
         cf_var = self._make_cf_var()
         attr_units = get_attr_units(cf_var, attributes)
-        self.assertEqual(attr_units, 'unknown')
+        self.assertEqual(attr_units, '?')
 
         self.assertEqual(attributes, expected_attributes)
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py
index 1c167ec45d..4be7b04249 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py
@@ -17,8 +17,6 @@
 from unittest import mock
 
-import numpy as np
-
 from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
     has_supported_mercator_parameters
 
@@ -135,5 +133,6 @@ def test_invalid_false_northing(self):
         self.assertEqual(len(warns), 1)
         self.assertRegex(str(warns[0]), 'False northing')
 
+
 if __name__ == "__main__":
     tests.main()
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py
index d02695f298..f528e22029 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py
@@ -17,9 +17,6 @@
 from unittest import mock
 
-import numpy as np
-
-from iris.coord_systems import Stereographic
 from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
     has_supported_stereographic_parameters
diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py
index 3dc6f96d48..7ce0573d25 100644
--- a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py
+++ b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py
@@ -335,6 +335,7 @@ def test_soil_level(self):
             points,
             long_name="soil_model_level_number",
             attributes={"positive": "down"},
+            units="1",
         )
         coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), (level, (0,))]
         self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims)
@@ -416,6 +417,7 @@ def test_vertical_hybrid_height(self):
                     [1, 2, 3],
                     "model_level_number",
                     attributes={"positive": "up"},
+                    units="1",
                 ),
                 (0,),
             ),
@@ -437,6 +439,7 @@
                     [0.9994, 0.9979, 0.9957],
                     long_name="sigma",
                     bounds=[[1, 0.9989], [0.9989, 0.9970], [0.9970, 0.9944]],
+                    units="1",
                 ),
                 (0,),
             ),
diff --git a/lib/iris/util.py b/lib/iris/util.py
index 3212eba4a5..2e69ca6f97 100644
--- a/lib/iris/util.py
+++ b/lib/iris/util.py
@@ -1061,7 +1061,7 @@ def clip_string(the_str, clip_length=70, rider="..."):
 
     Returns:
         The string clipped to the required length with a rider appended.
-        If the clip length was greater than the orignal string, the
+        If the clip length was greater than the original string, the
         original string is returned unaltered.
 
     """
@@ -1218,7 +1218,7 @@ def as_compatible_shape(src_cube, target_cube):
     dimension coordinates where necessary. It operates by matching coordinate
     metadata to infer the dimensions that need modifying, so the provided
     cubes must have coordinates with the same metadata
-    (see :class:`iris.coords.CoordDefn`).
+    (see :class:`iris.common.CoordMetadata`).
 
     .. note:: This function will load and copy the data payload of `src_cube`.
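
A recurring change in the test hunks above is constructing coordinates with an explicit `units="1"` instead of leaving the units unset. A minimal sketch of the distinction, assuming iris and its cf-units dependency are installed (the `DimCoord` call mirrors the ones in this diff)::

    # Sketch only: explicit dimensionless units vs. the "unknown" default.
    import numpy as np
    from iris.coords import DimCoord

    implicit = DimCoord(np.array([21]), long_name="pseudo_level")
    explicit = DimCoord(np.array([21]), long_name="pseudo_level", units="1")

    print(implicit.units)  # unknown -- no units were declared
    print(explicit.units)  # 1 -- declared as explicitly dimensionless

Passing `units="1"` records the dimensionless intent on the coordinate itself, which is why the expected coordinates in these tests now carry it.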
diff --git a/requirements/core.txt b/requirements/core.txt
index c3f5775d7e..56544d1926 100644
--- a/requirements/core.txt
+++ b/requirements/core.txt
@@ -8,7 +8,8 @@ cartopy>=0.12
 cf-units>=2
 cftime
 dask[array]>=2 #conda: dask>=2
-matplotlib
+matplotlib<3.3
 netcdf4
 numpy>=1.14
 scipy
+xxhash #conda: python-xxhash
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 6966869c70..2d2c03f688 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -1 +1,4 @@
 sphinx
+sphinx_rtd_theme
+sphinx-copybutton
+sphinx-gallery
diff --git a/setup.cfg b/setup.cfg
index 6e8bd69f88..a87902cbfd 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -12,7 +12,6 @@ exclude = */iris/std_names.py,\
           */iris/io/format_picker.py,\
           */iris/tests/__init__.py,\
           */iris/tests/pp.py,\
-          */iris/tests/stock.py,\
           */iris/tests/system_test.py,\
           */iris/tests/test_analysis.py,\
           */iris/tests/test_analysis_calculus.py,\
@@ -28,8 +27,6 @@ exclude = */iris/std_names.py,\
           */iris/tests/test_cube_to_pp.py,\
           */iris/tests/test_file_load.py,\
           */iris/tests/test_file_save.py,\
-          */iris/tests/test_grib_save.py,\
-          */iris/tests/test_grib_save_rules.py,\
           */iris/tests/test_hybrid.py,\
           */iris/tests/test_intersect.py,\
           */iris/tests/test_io_init.py,\
diff --git a/setup.py b/setup.py
index e5dd0e7bb9..b078e3de1f 100644
--- a/setup.py
+++ b/setup.py
@@ -181,7 +181,6 @@ def build_std_names(cmd, directory):
     xml_path = os.path.join("etc", "cf-standard-name-table.xml")
     module_path = os.path.join(directory, "iris", "std_names.py")
     args = (sys.executable, script_path, xml_path, module_path)
-
     cmd.spawn(args)
diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py
index 3aad3bb09c..95dcce8171 100644
--- a/tools/generate_std_names.py
+++ b/tools/generate_std_names.py
@@ -35,14 +35,17 @@
 This file is automatically generated. Do not edit this file by hand.
 
-The file will be generated during a standard build/installation:
+The file will be generated during a standard build/installation::
+
     python setup.py build
     python setup.py install
 
-Also, the file can be re-generated in the source distribution via:
+Also, the file can be re-generated in the source distribution via::
+
     python setup.py std_names
 
-Or for more control (e.g. to use an alternative XML file) via:
+Or for more control (e.g. to use an alternative XML file) via::
+
     python tools/generate_std_names.py XML_FILE MODULE_FILE
 """
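
The trailing `::` added throughout the docstring hunk above is reStructuredText's literal-block marker: a double colon followed by a blank line tells Sphinx to render the indented commands as preformatted code. A hypothetical module docstring illustrating the same pattern (illustrative only, not part of this patch)::

    """Hypothetical module showing the reST literal-block convention.

    Re-generate the output via::

        python tools/generate_std_names.py XML_FILE MODULE_FILE

    A single colon followed by an indented block would instead render as a
    block quote, not as code -- hence the `:` to `::` fixes above.
    """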