diff --git a/.travis.yml b/.travis.yml index f938791811..32e596e1c1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,22 +5,22 @@ # we will use conda to give us a much faster setup time. -language: python -python: - - 2.7 - - 3.6 -sudo: false +language: minimal +dist: xenial env: - matrix: - - TEST_TARGET=default TEST_MINIMAL=true - - TEST_TARGET=default - - TEST_TARGET=example - - TEST_TARGET=doctest - global: # The decryption key for the encrypted .github/deploy_key.scitools-docs.enc. - secure: "N9/qBUT5CqfC7KQBDy5mIWZcGNuUJk3e/qmKJpotWYV+zwOI4GghJsRce6nFnlRiwl65l5oBEcvf3+sBvUfbZqh7U0MdHpw2tHhr2FSCmMB3bkvARZblh9M37f4da9G9VmRkqnyBM5G5TImXtoq4dusvNWKvLW0qETciaipq7ws=" + matrix: + - PYTHON_VERSION=3.6 TEST_TARGET=default TEST_MINIMAL=true + - PYTHON_VERSION=3.6 TEST_TARGET=default + - PYTHON_VERSION=3.6 TEST_TARGET=example + + - PYTHON_VERSION=3.7 TEST_TARGET=default TEST_MINIMAL=true + - PYTHON_VERSION=3.7 TEST_TARGET=default + - PYTHON_VERSION=3.7 TEST_TARGET=example + - PYTHON_VERSION=3.7 TEST_TARGET=doctest PUSH_BUILT_DOCS=true git: # We need a deep clone so that we can compute the age of the files using their git history. @@ -28,30 +28,17 @@ git: install: - > - export IRIS_TEST_DATA_REF="dba47566a9147645fea586f94a138e0a8d45a48e"; + export IRIS_TEST_DATA_REF="1696ac3a823a06b95f430670f285ee97671d2cf2"; export IRIS_TEST_DATA_SUFFIX=$(echo "${IRIS_TEST_DATA_REF}" | sed "s/^v//"); - # Cut short doctest phase under Python 2 : now only supports Python 3 - # SEE : https://github.com/SciTools/iris/pull/3134 - # ------------ - - > - if [[ $TEST_TARGET == 'doctest' && ${TRAVIS_PYTHON_VERSION} != 3* ]]; then - echo "DOCTEST phase only valid in Python 3 : ABORTING during 'install'." 
- exit 0 - fi - # Install miniconda # ----------------- - > echo 'Installing miniconda'; - export CONDA_BASE=https://repo.continuum.io/miniconda/Miniconda; - if [[ "$TRAVIS_PYTHON_VERSION" == 2* ]]; then - wget --quiet ${CONDA_BASE}2-latest-Linux-x86_64.sh -O miniconda.sh; - else - wget --quiet ${CONDA_BASE}3-latest-Linux-x86_64.sh -O miniconda.sh; - fi; - bash miniconda.sh -b -p $HOME/miniconda; - export PATH="$HOME/miniconda/bin:$PATH"; + export CONDA_BASE="https://repo.continuum.io/miniconda/Miniconda"; + wget --quiet ${CONDA_BASE}3-latest-Linux-x86_64.sh -O miniconda.sh; + bash miniconda.sh -b -p ${HOME}/miniconda; + export PATH="${HOME}/miniconda/bin:${PATH}"; # Create the basic testing environment # ------------------------------------ @@ -63,35 +50,31 @@ install: conda config --add channels conda-forge; conda update --quiet conda; ENV_NAME='test-environment'; - conda create --quiet -n $ENV_NAME python=$TRAVIS_PYTHON_VERSION pip; - source activate $ENV_NAME; + conda create --quiet -n ${ENV_NAME} python=${PYTHON_VERSION} pip; + source activate ${ENV_NAME}; # Customise the testing environment # --------------------------------- - > echo 'Install Iris dependencies'; - CONDA_REQS_FLAGS=""; CONDA_REQS_GROUPS="test"; - if [[ "$TRAVIS_PYTHON_VERSION" == 2* ]]; then - CONDA_REQS_FLAGS="${CONDA_REQS_FLAGS} --py2"; - fi; - if [[ "$TEST_MINIMAL" != true ]]; then + if [[ "${TEST_MINIMAL}" != true ]]; then CONDA_REQS_GROUPS="${CONDA_REQS_GROUPS} all"; fi; if [[ "${TEST_TARGET}" == 'doctest' ]]; then CONDA_REQS_GROUPS="${CONDA_REQS_GROUPS} docs"; fi; - CONDA_REQS_FILE=conda-requirements.txt; - python requirements/gen_conda_requirements.py ${CONDA_REQS_FLAGS} --groups ${CONDA_REQS_GROUPS} > ${CONDA_REQS_FILE}; + CONDA_REQS_FILE="conda-requirements.txt"; + python requirements/gen_conda_requirements.py --groups ${CONDA_REQS_GROUPS} > ${CONDA_REQS_FILE}; cat ${CONDA_REQS_FILE}; - conda install --quiet -n $ENV_NAME --file ${CONDA_REQS_FILE}; + conda install --quiet -n 
${ENV_NAME} --file ${CONDA_REQS_FILE}; - - PREFIX=$HOME/miniconda/envs/$ENV_NAME + - PREFIX="${HOME}/miniconda/envs/${ENV_NAME}" # Output debug info - > - conda list -n $ENV_NAME; - conda list -n $ENV_NAME --explicit; + conda list -n ${ENV_NAME}; + conda list -n ${ENV_NAME} --explicit; conda info -a; # Pre-load Natural Earth data to avoid multiple, overlapping downloads. @@ -100,7 +83,7 @@ install: # iris test data - > - if [[ "$TEST_MINIMAL" != true ]]; then + if [[ "${TEST_MINIMAL}" != true ]]; then wget --quiet -O iris-test-data.zip https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip; unzip -q iris-test-data.zip; mv "iris-test-data-${IRIS_TEST_DATA_SUFFIX}" iris-test-data; @@ -108,21 +91,21 @@ install: # set config paths - > - SITE_CFG=lib/iris/etc/site.cfg; - echo "[Resources]" > $SITE_CFG; - echo "test_data_dir = $(pwd)/iris-test-data/test_data" >> $SITE_CFG; - echo "doc_dir = $(pwd)/docs/iris" >> $SITE_CFG; - echo "[System]" >> $SITE_CFG; - echo "udunits2_path = $PREFIX/lib/libudunits2.so" >> $SITE_CFG; + SITE_CFG="lib/iris/etc/site.cfg"; + echo "[Resources]" > ${SITE_CFG}; + echo "test_data_dir = $(pwd)/iris-test-data/test_data" >> ${SITE_CFG}; + echo "doc_dir = $(pwd)/docs/iris" >> ${SITE_CFG}; + echo "[System]" >> ${SITE_CFG}; + echo "udunits2_path = ${PREFIX}/lib/libudunits2.so" >> ${SITE_CFG}; - python setup.py --quiet install - # JUST FOR NOW : Install latest version of iris-grib. # TODO : remove when iris doesn't do an integration test requiring iris-grib. - - if [[ "$TEST_MINIMAL" != true && ${TRAVIS_PYTHON_VERSION} == 2* ]]; then - conda install --quiet -n $ENV_NAME python-ecmwf_grib; - pip install git+https://github.com/SciTools/iris-grib.git@v0.11.0; - fi +# TODO: uncomment and address the 5 failures and 10 errors in iris-grib. 
+# - if [[ "${TEST_MINIMAL}" != true ]]; then +# conda install --quiet -n ${ENV_NAME} python-eccodes; +# conda install --quiet -n ${ENV_NAME} --no-deps iris-grib; +# fi script: # Capture install-dir: As a test command must be last for get Travis to check @@ -130,31 +113,52 @@ script: - INSTALL_DIR=$(pwd) - > - if [[ $TEST_TARGET == 'default' ]]; then - export IRIS_REPO_DIR=$INSTALL_DIR; - python -m iris.tests.runner --default-tests --system-tests --print-failed-images --num-processors=3; + if [[ ${TEST_TARGET} == 'default' ]]; then + export IRIS_REPO_DIR=${INSTALL_DIR}; + python -m iris.tests.runner --default-tests --system-tests --print-failed-images; fi - - if [[ $TEST_TARGET == 'example' ]]; then - python -m iris.tests.runner --example-tests --print-failed-images --num-processors=3; + - if [[ ${TEST_TARGET} == 'example' ]]; then + python -m iris.tests.runner --example-tests --print-failed-images; fi + # A call to check "whatsnew" contributions are valid, because the Iris test + # for it needs a *developer* install to be able to find the docs. + - if [[ ${TEST_TARGET} == 'doctest' ]]; then + cd ${INSTALL_DIR}/docs/iris/src/whatsnew; + python aggregate_directory.py --checkonly; + fi + + # When pushing built docs, attempt to make a preliminary whatsnew by calling + # 'aggregate_directory.py', before the build. - > - if [[ $TEST_TARGET == 'doctest' ]]; then - MPL_RC_DIR=$HOME/.config/matplotlib; - mkdir -p $MPL_RC_DIR; - echo 'backend : agg' > $MPL_RC_DIR/matplotlibrc; - echo 'image.cmap : viridis' >> $MPL_RC_DIR/matplotlibrc; - cd $INSTALL_DIR/docs/iris; + if [[ ${PUSH_BUILT_DOCS} == 'true' ]]; then + cd ${INSTALL_DIR}/docs/iris/src/whatsnew; + WHATSNEW=$(ls -d contributions_* 2>/dev/null); + if [[ "$WHATSNEW" != "" ]]; then + python aggregate_directory.py --unreleased; + fi; + fi + + # Build the docs. 
+ - > + if [[ ${TEST_TARGET} == 'doctest' ]]; then + MPL_RC_DIR="${HOME}/.config/matplotlib"; + mkdir -p ${MPL_RC_DIR}; + echo 'backend : agg' > ${MPL_RC_DIR}/matplotlibrc; + echo 'image.cmap : viridis' >> ${MPL_RC_DIR}/matplotlibrc; + cd ${INSTALL_DIR}/docs/iris; make clean html && make doctest; fi # Split the organisation out of the slug. See https://stackoverflow.com/a/5257398/741316 for description. - ORG=(${TRAVIS_REPO_SLUG//\// }) - # When we merge a change, and we are running in python 3, push some docs. - - if [[ $TEST_TARGET == 'doctest' && ${TRAVIS_EVENT_TYPE} == 'push' && ${TRAVIS_PYTHON_VERSION} == 3* && ${ORG} == "SciTools" ]]; then - cd $INSTALL_DIR; + # When we merge a change to SciTools/iris, we can push docs to github pages. + # At present, only the Python 3.7 "doctest" job does this. + # Results appear at https://scitools-docs.github.io/iris/<>/index.html + - if [[ ${ORG} == "SciTools" && ${TRAVIS_EVENT_TYPE} == 'push' && ${PUSH_BUILT_DOCS} == 'true' ]]; then + cd ${INSTALL_DIR}; pip install doctr; doctr deploy --deploy-repo SciTools-docs/iris --built-docs docs/iris/build/html --key-path .github/deploy_key.scitools-docs.enc @@ -162,9 +166,3 @@ script: ${TRAVIS_BRANCH:-${TRAVIS_TAG}}; fi - # An extra call to check "whatsnew" contributions are valid, because the - # Iris test for it needs a *developer* install to be able to find the docs. - - if [[ $TEST_TARGET == 'doctest' ]]; then - cd $INSTALL_DIR/docs/iris/src/whatsnew; - python aggregate_directory.py --checkonly; - fi diff --git a/README.md b/README.md index f1f98f6233..fb8660f2ad 100644 --- a/README.md +++ b/README.md @@ -11,21 +11,27 @@

+ +Travis-CI conda-forge downloads + +# contributors Latest version + +Stable docs Commits since last release - -# contributors - -Travis-CI + +Latest docs zenodo @@ -82,8 +88,10 @@ use of standard NumPy/dask arrays as its underlying data storage. # Documentation -The documentation for Iris is available at , -including a user guide, example code, and gallery. + Stable docs The documentation for *stable released versions* of Iris, including a user guide, example code, and gallery. + + Latest docs The documentation for the *latest development version* of Iris. + # Installation diff --git a/docs/iris/example_code/Oceanography/load_nemo.py b/docs/iris/example_code/Oceanography/load_nemo.py new file mode 100644 index 0000000000..a76da68248 --- /dev/null +++ b/docs/iris/example_code/Oceanography/load_nemo.py @@ -0,0 +1,59 @@ +""" +Load a time series of data from the NEMO model +============================================== + +This example demonstrates how to load multiple files containing data output by +the NEMO model and combine them into a time series in a single cube. The +different time dimensions in these files can prevent Iris from concatenating +them without the intervention shown here. +""" +from __future__ import unicode_literals + +import iris +import iris.plot as iplt +import iris.quickplot as qplt +import matplotlib.pyplot as plt +from iris.util import promote_aux_coord_to_dim_coord + + +def main(): + # Load the three files of sample NEMO data. + fname = iris.sample_data_path('NEMO/nemo_1m_*.nc') + cubes = iris.load(fname) + + # Some attributes are unique to each file and must be blanked + # to allow concatenation. + differing_attrs = ['file_name', 'name', 'timeStamp', 'TimeStamp'] + for cube in cubes: + for attribute in differing_attrs: + cube.attributes[attribute] = '' + + # The cubes still cannot be concatenated because their time dimension is + # time_counter rather than time. time needs to be promoted to allow + # concatenation. 
+ for cube in cubes: + promote_aux_coord_to_dim_coord(cube, 'time') + + # The cubes can now be concatenated into a single time series. + cube = cubes.concatenate_cube() + + # Generate a time series plot of a single point + plt.figure() + y_point_index = 100 + x_point_index = 100 + qplt.plot(cube[:, y_point_index, x_point_index], 'o-') + + # Include the point's position in the plot's title + lat_point = cube.coord('latitude').points[y_point_index, x_point_index] + lat_string = '{:.3f}\u00B0 {}'.format(abs(lat_point), + 'N' if lat_point > 0. else 'S') + lon_point = cube.coord('longitude').points[y_point_index, x_point_index] + lon_string = '{:.3f}\u00B0 {}'.format(abs(lon_point), + 'E' if lon_point > 0. else 'W') + plt.title('{} at {} {}'.format(cube.long_name.capitalize(), + lat_string, lon_string)) + iplt.show() + + +if __name__ == '__main__': + main() diff --git a/docs/iris/example_tests/test_load_nemo.py b/docs/iris/example_tests/test_load_nemo.py new file mode 100644 index 0000000000..13785609e2 --- /dev/null +++ b/docs/iris/example_tests/test_load_nemo.py @@ -0,0 +1,41 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . 
+ +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import Iris tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from .extest_util import (add_examples_to_path, + show_replaced_by_check_graphic, + fail_any_deprecation_warnings) + + +class TestLoadNemo(tests.GraphicsTest): + """Test the load_nemo example code.""" + def test_load_nemo(self): + with fail_any_deprecation_warnings(): + with add_examples_to_path(): + import load_nemo + with show_replaced_by_check_graphic(self): + load_nemo.main() + + +if __name__ == '__main__': + tests.main() diff --git a/docs/iris/src/IEP/IEP001.adoc b/docs/iris/src/IEP/IEP001.adoc new file mode 100644 index 0000000000..d38b2e8478 --- /dev/null +++ b/docs/iris/src/IEP/IEP001.adoc @@ -0,0 +1,193 @@ +# IEP 1 - Enhanced indexing + +## Background + +Currently, to select a subset of a Cube based on coordinate values we use something like: +[source,python] +---- +cube.extract(iris.Constraint(realization=3, + model_level_number=[1, 5], + latitude=lambda cell: 40 <= cell <= 60)) +---- +On the plus side, this works irrespective of the dimension order of the data, but the drawbacks with this form of indexing include: + +* It uses a completely different syntax to position-based indexing, e.g. `cube[4, 0:6]`. +* It uses a completely different syntax to pandas and xarray value-based indexing, e.g. `df.loc[4, 0:6]`. +* It is long-winded and requires the use of an additional class. +* It requires the use of lambda functions even when just selecting a range. + +Arguably, the situation when subsetting using positional indices but where the dimension order is unknown is even worse - it has no standard syntax _at all_! 
Instead it requires code akin to:
+[source,python]
+----
+key = [slice(None)] * cube.ndim
+key[cube.coord_dims('model_level_number')[0]] = slice(3, 9, 2)
+cube[tuple(key)]
+----
+
+The only form of indexing that is well supported is indexing by position where the dimension order is known:
+[source,python]
+----
+cube[4, 0:6, 30:]
+----
+
+## Proposal
+
+Provide indexing helpers on the Cube to extend explicit support to all permutations of:
+
+* implicit dimension vs. named coordinate,
+* and positional vs. coordinate-value based selection.
+
+### Helper syntax options
+
+Commonly, the names of coordinates are also valid Python identifiers.
+For names where this is not true, the names can be expressed through either the `helper[...]` or `helper(...)` syntax by constructing an explicit dict.
+For example: `cube.loc[{'12': 0}]` or `cube.loc(**{'12': 0})`.
+
+#### Extended pandas style
+
+Use a single helper for index by position, and a single helper for index by value. Helper names taken from pandas, but their behaviour is extended by making them callable to support named coordinates.
+
+|===
+.2+| 2+h|Index by
+h|Position h|Value
+
+h|Implicit dimension
+
+a|[source,python]
+----
+cube[:, 2] # No change
+cube.iloc[:, 2]
+----
+
+a|[source,python]
+----
+cube.loc[:, 1.5]
+----
+
+h|Coordinate name
+
+a|[source,python]
+----
+cube[dict(height=2)]
+cube.iloc[dict(height=2)]
+cube.iloc(height=2)
+----
+
+a|[source,python]
+----
+cube.loc[dict(height=1.5)]
+cube.loc(height=1.5)
+----
+|===
+
+#### xarray style
+
+xarray introduces a second set of helpers for accessing named dimensions that provide the callable syntax `(foo=...)`. 
+
+|===
+.2+| 2+h|Index by
+h|Position h|Value
+
+h|Implicit dimension
+
+a|[source,python]
+----
+cube[:, 2] # No change
+----
+
+a|[source,python]
+----
+cube.loc[:, 1.5]
+----
+
+h|Coordinate name
+
+a|[source,python]
+----
+ cube[dict(height=2)]
+ cube.isel(height=2)
+----
+
+a|[source,python]
+----
+cube.loc[dict(height=1.5)]
+cube.sel(height=1.5)
+----
+|===
+
+### Slices
+
+The semantics of position-based slices will continue to match that of normal Python slices. The start position is included, the end position is excluded.
+
+Value-based slices will be strictly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas.
+
+Just as for normal Python slices, we do not need to provide the ability to control the include/exclude behaviour for slicing.
+
+### Value-based indexing
+
+#### Equality
+
+Should the behaviour of value-based equality depend on the data type of the coordinate?
+
+* integer: exact match
+* float: tolerance match, tolerance determined by bit-width
+* string: exact match
+
+#### Scalar/category
+
+If/how to deal with category selection `cube.loc(season='JJA')`? Defer to `groupby()`?
+
+`cube.loc[12]` - must always match a single value or raise KeyError, corresponding dimension will be removed
+`cube.loc[[12]]` - may match any number of values? (incl. zero?), dimension will be retained
+
+### Out of scope
+
+* Deliberately enhancing the performance.
+This is a very valuable topic and should be addressed by subsequent efforts.
+
+* Time/date values as strings.
+Providing pandas-style string representations for convenient representation of partial date/times should be addressed in a subsequent effort - perhaps in conjunction with an explicit performance test suite.
+There is a risk that this topic could bog down when dealing with non-standard calendars and climatological date ranges.
+
+## Work required
+
+* Implementations for each of the new helper objects. 
+* An update to the documentation to demonstrate best practice. Known impacted areas include: +** The "Subsetting a Cube" chapter of the user guide. + +### TODO +* Multi-dimensional coordinates +* Non-orthogonal coordinates +* Bounds +* Boolean array indexing +* Lambdas? +* What to do about constrained loading? +* Relationship to http://scitools.org.uk/iris/docs/v1.9.2/iris/iris/cube.html#iris.cube.Cube.intersection[iris.cube.Cube.intersection]? +* Relationship to interpolation (especially nearest-neighbour)? +** e.g. What to do about values that don't exist? +*** pandas throws a KeyError +*** xarray supports (several) nearest-neighbour schemes via http://xarray.pydata.org/en/stable/indexing.html#nearest-neighbor-lookups[`data.sel()`] +*** Apparently http://holoviews.org/[holoviews] does nearest-neighbour interpolation. +* multi-dimensional coordinate => unroll? +* var_name only selection? `cube.vloc(t0=12)` +* Orthogonal only? Or also independent? `cube.loc_points(lon=[1, 1, 5], lat=[31, 33, 32])` + ** This seems quite closely linked to interpolation. Is the interpolation scheme orthogonal to cross-product vs. independent? ++ +[source,python] +---- +cube.interpolate( + scheme='nearest', + mesh=dict(lon=[5, 10, 15], lat=[40, 50])) +cube.interpolate( + scheme=Nearest(mode='spherical'), + locations=Ortho(lon=[5, 10, 15], lat=[40, 50])) +---- + +## References +. Iris + * http://scitools.org.uk/iris/docs/v1.9.2/iris/iris.html#iris.Constraint[iris.Constraint] + * http://scitools.org.uk/iris/docs/v1.9.2/userguide/subsetting_a_cube.html[Subsetting a cube] +. http://pandas.pydata.org/pandas-docs/stable/indexing.html[pandas indexing] +. http://xarray.pydata.org/en/stable/indexing.html[xarray indexing] +. http://legacy.python.org/dev/peps/pep-0472/[PEP 472 - Support for indexing with keyword arguments] +. 
http://nbviewer.jupyter.org/gist/rsignell-usgs/13d7ce9d95fddb4983d4cbf98be6c71d[Time slicing NetCDF or OPeNDAP datasets] - Rich Signell's xarray/iris comparison focussing on time handling and performance diff --git a/docs/iris/src/_static/Iris7_1_trim_100.png b/docs/iris/src/_static/Iris7_1_trim_100.png index b9267ced2c..330ee6e95d 100644 Binary files a/docs/iris/src/_static/Iris7_1_trim_100.png and b/docs/iris/src/_static/Iris7_1_trim_100.png differ diff --git a/docs/iris/src/_static/Iris7_1_trim_full.png b/docs/iris/src/_static/Iris7_1_trim_full.png index 3d65c28c9a..ac219de136 100644 Binary files a/docs/iris/src/_static/Iris7_1_trim_full.png and b/docs/iris/src/_static/Iris7_1_trim_full.png differ diff --git a/docs/iris/src/_static/favicon-16x16.png b/docs/iris/src/_static/favicon-16x16.png index 54a743670f..ea64d21a55 100644 Binary files a/docs/iris/src/_static/favicon-16x16.png and b/docs/iris/src/_static/favicon-16x16.png differ diff --git a/docs/iris/src/_static/favicon-32x32.png b/docs/iris/src/_static/favicon-32x32.png index e3fadd6c65..9270dd6a99 100644 Binary files a/docs/iris/src/_static/favicon-32x32.png and b/docs/iris/src/_static/favicon-32x32.png differ diff --git a/docs/iris/src/_static/logo_banner.png b/docs/iris/src/_static/logo_banner.png index 132ba19c22..4bec22f5dc 100644 Binary files a/docs/iris/src/_static/logo_banner.png and b/docs/iris/src/_static/logo_banner.png differ diff --git a/docs/iris/src/_templates/index.html b/docs/iris/src/_templates/index.html index 31acded447..c18f0268fa 100644 --- a/docs/iris/src/_templates/index.html +++ b/docs/iris/src/_templates/index.html @@ -134,7 +134,7 @@ extra information on specific technical issues

  • -
  • diff --git a/docs/iris/src/_templates/layout.html b/docs/iris/src/_templates/layout.html index 8ecc35bade..f854455f71 100644 --- a/docs/iris/src/_templates/layout.html +++ b/docs/iris/src/_templates/layout.html @@ -37,7 +37,7 @@

    - Iris v2.2 + Iris v3.0

    A powerful, format-agnostic, community-driven Python library for analysing and diff --git a/docs/iris/src/userguide/interpolation_and_regridding.rst b/docs/iris/src/userguide/interpolation_and_regridding.rst index e3cd622541..565f9b61eb 100644 --- a/docs/iris/src/userguide/interpolation_and_regridding.rst +++ b/docs/iris/src/userguide/interpolation_and_regridding.rst @@ -185,8 +185,8 @@ For example, to mask values that lie beyond the range of the original data: >>> scheme = iris.analysis.Linear(extrapolation_mode='mask') >>> new_column = column.interpolate(sample_points, scheme) >>> print(new_column.coord('altitude').points) - [ nan 494.44452 588.8889 683.33325 777.77783 872.2222 - 966.66675 1061.1111 1155.5554 nan] + [-- 494.44451904296875 588.888916015625 683.333251953125 777.77783203125 + 872.2222290039062 966.666748046875 1061.111083984375 1155.555419921875 --] .. _caching_an_interpolator: diff --git a/docs/iris/src/userguide/loading_iris_cubes.rst b/docs/iris/src/userguide/loading_iris_cubes.rst index c2ba575838..2cb3b9b259 100644 --- a/docs/iris/src/userguide/loading_iris_cubes.rst +++ b/docs/iris/src/userguide/loading_iris_cubes.rst @@ -52,7 +52,12 @@ The ``air_potential_temperature`` cubes were 4 dimensional with: The result of :func:`iris.load` is **always** a :class:`list of cubes `. Anything that can be done with a Python :class:`list` can be done - with the resultant list of cubes. + with the resultant list of cubes. It is worth noting, however, that + there is no inherent order to this + :class:`list of cubes `. + Because of this, indexing may be inconsistent. A more consistent way to + extract a cube is by using the :class:`iris.Constraint` class as + described in :ref:`constrained-loading`. .. hint:: @@ -146,6 +151,8 @@ This is referred to as 'lazy' data. It allows loading to be much quicker, and t For more on the benefits, handling and uses of lazy data, see :doc:`Real and Lazy Data `. +.. 
_constrained-loading: + Constrained loading ----------------------- Given a large dataset, it is possible to restrict or constrain the load @@ -258,7 +265,7 @@ However, when constraining by time we usually want to test calendar-related aspects such as hours of the day or months of the year, so Iris provides special features to facilitate this: -Firstly, when Iris evaluates Constraint expressions, it will convert time-coordinate +Firstly, when Iris evaluates Constraint expressions, it will convert time-coordinate values (points and bounds) from numbers into :class:`~datetime.datetime`-like objects for ease of calendar-based testing. @@ -286,25 +293,26 @@ then test only those 'aspects' which the PartialDateTime instance defines: True >>> print(dt > PartialDateTime(month=6)) False - >>> + >>> These two facilities can be combined to provide straightforward calendar-based time selections when loading or extracting data. The previous constraint example can now be written as: - >>> the_11th_hour = iris.Constraint(time=iris.time.PartialDateTime(hour=11)) - >>> print(iris.load_cube( - ... iris.sample_data_path('uk_hires.pp'), - ... 'air_potential_temperature' & the_11th_hour).coord('time')) - DimCoord([2009-11-19 11:00:00], standard_name='time', calendar='gregorian') - -A more complex example might be when there exists a time sequence representing the first day of every week -for many years: + >>> the_11th_hour = iris.Constraint(time=iris.time.PartialDateTime(hour=11)) + >>> print(iris.load_cube( + ... iris.sample_data_path('uk_hires.pp'), + ... 'air_potential_temperature' & the_11th_hour).coord('time')) + DimCoord([2009-11-19 11:00:00], standard_name='time', calendar='gregorian') +It is common that a cube will need to be constrained between two given dates. +In the following example we construct a time sequence representing the first +day of every week for many years: .. 
testsetup:: timeseries_range + import datetime import numpy as np from iris.time import PartialDateTime long_ts = iris.cube.Cube(np.arange(150), long_name='data', units='1') @@ -314,21 +322,53 @@ for many years: .. doctest:: timeseries_range :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - + >>> print(long_ts.coord('time')) DimCoord([2007-04-09 00:00:00, 2007-04-16 00:00:00, 2007-04-23 00:00:00, ... 2010-02-01 00:00:00, 2010-02-08 00:00:00, 2010-02-15 00:00:00], standard_name='time', calendar='gregorian') -We can select points within a certain part of the year, in this case between -the 15th of July through to the 25th of August, by combining the datetime cell -functionality with PartialDateTime: +Given two dates in datetime format, we can select all points between them. + +.. doctest:: timeseries_range + :options: +NORMALIZE_WHITESPACE, +ELLIPSIS + + >>> d1 = datetime.datetime.strptime('20070715T0000Z', '%Y%m%dT%H%MZ') + >>> d2 = datetime.datetime.strptime('20070825T0000Z', '%Y%m%dT%H%MZ') + >>> st_swithuns_daterange_07 = iris.Constraint( + ... time=lambda cell: d1 <= cell.point < d2) + >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) + >>> print(within_st_swithuns_07.coord('time')) + DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00], + standard_name='time', calendar='gregorian') + +Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime` +objects. + +.. doctest:: timeseries_range + :options: +NORMALIZE_WHITESPACE, +ELLIPSIS + + >>> pdt1 = PartialDateTime(year=2007, month=7, day=15) + >>> pdt2 = PartialDateTime(year=2007, month=8, day=25) + >>> st_swithuns_daterange_07 = iris.Constraint( + ... 
time=lambda cell: pdt1 <= cell.point < pdt2) + >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) + >>> print(within_st_swithuns_07.coord('time')) + DimCoord([2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00], + standard_name='time', calendar='gregorian') + +A more complex example might require selecting points over an annually repeating +date range. We can select points within a certain part of the year, in this case +between the 15th of July through to the 25th of August. By making use of +PartialDateTime this becomes simple: .. doctest:: timeseries_range >>> st_swithuns_daterange = iris.Constraint( - ... time=lambda cell: PartialDateTime(month=7, day=15) < cell < PartialDateTime(month=8, day=25)) + ... time=lambda cell: PartialDateTime(month=7, day=15) <= cell < PartialDateTime(month=8, day=25)) >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange) ... >>> print(within_st_swithuns.coord('time')) diff --git a/docs/iris/src/whatsnew/2.3.rst b/docs/iris/src/whatsnew/2.3.rst new file mode 100644 index 0000000000..c5a6060146 --- /dev/null +++ b/docs/iris/src/whatsnew/2.3.rst @@ -0,0 +1,215 @@ +What's New in Iris 2.3.0 +************************ + +:Release: 2.3.0 +:Date: 2019-10-04 + +This document explains the new/changed features of Iris in version 2.3.0 +(:doc:`View all changes `.) + +Iris 2.3.0 Features +=================== +.. _showcase: + +.. admonition:: Increased Support for CF 1.7 + + We have introduced several changes that contribute to Iris's support for + the CF Conventions, including some CF 1.7 additions. We are now able to + support: + + * :ref:`Climatological Coordinates` + * :ref:`Standard name modifiers` + * :ref:`Geostationary projection` + + You can read more about each of these below. + + Additionally, the conventions attribute, added by Iris when saving to + NetCDF, has been updated to "CF-1.7", accordingly. + +.. _climatological: +.. 
admonition:: Climatological Coordinate Support + + Iris can now load, store and save `NetCDF climatological coordinates + `_. Any cube time + coordinate can be marked as a climatological time axis using the boolean + property: ``climatological``. The climatological bounds are stored in the + coordinate's ``bounds`` property. + + When an Iris climatological coordinate is saved in NetCDF, the NetCDF + coordinate variable will be given a 'climatology' attribute, and the + contents of the + coordinate's ``bounds`` property are written to a NetCDF boundary variable + called '_bounds'. These are in place of a standard + 'bounds' attribute and accompanying boundary variable. See below + for an `example adapted from CF conventions `_: + + .. code-block:: none + + dimensions: + time=4; + bnds=2; + variables: + float temperature(time,lat,lon); + temperature:long_name="surface air temperature"; + temperature:cell_methods="time: minimum within years time: mean over years"; + temperature:units="K"; + double time(time); + time:climatology="time_climatology"; + time:units="days since 1960-1-1"; + double time_climatology(time,bnds); + data: // time coordinates translated to date/time format + time="1960-4-16", "1960-7-16", "1960-10-16", "1961-1-16" ; + time_climatology="1960-3-1", "1990-6-1", + "1960-6-1", "1990-9-1", + "1960-9-1", "1990-12-1", + "1960-12-1", "1991-3-1" ; + + If a climatological time axis is detected when loading NetCDF - + indicated by the format described above - the ``climatological`` property + of the Iris coordinate will be set to ``True``. + +.. admonition:: New Chunking Strategy + + Iris now makes better choices of Dask chunk sizes when loading from NetCDF + files: If a file variable has small, specified chunks, Iris will now choose + Dask chunks which are a multiple of these up to a default target size. + + This is particularly relevant to files with an unlimited dimension, which + previously could produce a large number of small chunks. 
This had an adverse + effect on performance. + + In addition, Iris now takes its default chunksize from the default configured + in Dask itself, i.e. ``dask.config.get('array.chunk-size')``. + +.. admonition:: Lazy Statistics + + Several statistical operations can now be done lazily, taking advantage of the + performance improvements offered by Dask: + + * :meth:`~iris.cube.Cube.aggregated_by` + * :class:`~iris.analysis.RMS` (more detail below) + * :class:`~iris.analysis.MEAN` + +---- + +.. _geostationary: +.. _standard_name: +.. _conventions_1.7: + +* Cube data equality testing (and hence cube equality) now uses a more + relaxed + tolerance : This means that some cubes may now test 'equal' that previously + did not. + Previously, Iris compared cube data arrays using: + ``abs(a - b) < 1.e-8`` + + We now apply the default operation of :func:`numpy.allclose` instead, + which is equivalent to: + ``abs(a - b) < (1.e-8 + 1.e-5 * b)`` + +* Added support to render HTML for :class:`~iris.cube.CubeList` in Jupyter + Notebooks and JupyterLab. +* Loading CellMeasures with integer values is now supported. +* New coordinate system: :class:`iris.coord_systems.Geostationary`, + including load and save support, based on the `CF Geostationary projection + definition `_. +* :class:`iris.coord_systems.VerticalPerspective` can now be saved to and + loaded from NetCDF files. +* :class:`iris.experimental.regrid.PointInCell` moved to + :class:`iris.analysis.PointInCell` to make this regridding scheme public +* Iris now supports standard name modifiers. See `Appendix C, Standard Name Modifiers `_ for more information. +* :meth:`iris.cube.Cube.remove_cell_measure` now also allows removal of a cell + measure by its name (previously only accepted a CellMeasure object). +* The :data:`iris.analysis.RMS` aggregator now supports a lazy calculation. 
+ However, the "weights" keyword is not currently supported by this, so a + *weighted* calculation will still return a realised result, *and* force + realisation of the original cube data. +* Iris now supports NetCDF Climate and Forecast (CF) Metadata Conventions 1.7 (see `CF 1.7 Conventions Document `_ for more information) + + +Iris 2.3.0 Dependency Updates +============================= +* Iris now supports Proj4 up to version 5, but not yet 6 or beyond, pending + `fixes to some cartopy tests `_. +* Iris now requires Dask >= 1.2 to allow for improved coordinate equality + checks. + + +Bugs Fixed +========== +* Cube equality of boolean data is now handled correctly. +* Fixed a bug where cell measures were incorrect after a cube + :meth:`~iris.cube.Cube.transpose` operation. Previously, this resulted in + cell-measures that were no longer correctly mapped to the cube dimensions. +* The :class:`~iris.coords.AuxCoord` disregarded masked points and bounds, as did the :class:`~iris.coords.DimCoord`. + Fix permits an :class:`~iris.coords.AuxCoord` to contain masked points/bounds, and a TypeError exception is now + raised when attempting to create or set the points/bounds of a + :class:`~iris.coords.DimCoord` with arrays with missing points. +* :class:`iris.coord_systems.VerticalPerspective` coordinate system now uses + the `CF Vertical perspective definition `_; had been + erroneously using Geostationary. +* :class:`~iris.coords.CellMethod` will now only use valid `NetCDF name tokens `_ to reference the coordinates involved in the statistical operation. +* The following var_name properties will now only allow valid `NetCDF name + tokens + `_ to + reference the said NetCDF variable name. Note that names with a leading + underscore are not permitted. 
+ - :attr:`iris.aux_factory.AuxCoordFactory.var_name` + - :attr:`iris.coords.CellMeasure.var_name` + - :attr:`iris.coords.Coord.var_name` + - :attr:`iris.coords.AuxCoord.var_name` + - :attr:`iris.cube.Cube.var_name` +* Rendering a cube in Jupyter will no longer crash for a cube with + attributes containing ``\n``. +* NetCDF variables which reference themselves in their ``cell_measures`` + attribute can now be read. +* :func:`~iris.plot.quiver` now handles circular coordinates. +* The names of cubes loaded from abf/abl files have been corrected. +* Fixed a bug in UM file loading, where any landsea-mask-compressed fields + (i.e. with LBPACK=x2x) would cause an error later, when realising the data. +* :meth:`iris.cube.Cube.collapsed` now handles partial collapsing of + multidimensional coordinates that have bounds. +* Fixed a bug in the :data:`~iris.analysis.PROPORTION` aggregator, where cube + data in the form of a masked array with ``array.mask=False`` would cause an + error, but possibly only later when the values are actually realised. + ( Note: since netCDF4 version 1.4.0, this is now a common form for data + loaded from netCDF files ). +* Fixed a bug where plotting a cube with a + :class:`iris.coord_systems.LambertConformal` coordinate system would result + in an error. This would happen if the coordinate system was defined with one + standard parallel, rather than two. + In these cases, a call to + :meth:`~iris.coord_systems.LambertConformal.as_cartopy_crs` would fail. +* :meth:`iris.cube.Cube.aggregated_by` now gives correct values in points and + bounds when handling multidimensional coordinates. +* Fixed a bug in the :meth:`iris.cube.Cube.collapsed` operation, which caused + the unexpected realization of any attached auxiliary coordinates that were + *bounded*. It now correctly produces a lazy result and does not realise + the original attached AuxCoords. 
+ + +Documentation Changes +===================== +* Added a gallery example showing `how to concatenate NEMO ocean model data + <../examples/Oceanography/load_nemo.html>`_. +* Added an example in the + `Loading Iris Cubes: Constraining on Time <../userguide/loading_iris_cubes + .html#constraining-on-time>`_ + Userguide section, demonstrating how to load data within a specified date + range. +* Added notes to the :func:`iris.load` documentation, and the userguide + `Loading Iris Cubes <../userguide/loading_iris_cubes.html>`_ + chapter, emphasizing that the *order* of the cubes returned by an iris load + operation is effectively random and unstable, and should not be relied on. +* Fixed references in the documentation of + :func:`iris.util.find_discontiguities` to a nonexistent + "mask_discontiguities" routine : these now refer to + :func:`~iris.util.mask_cube`. + diff --git a/docs/iris/src/whatsnew/aggregate_directory.py b/docs/iris/src/whatsnew/aggregate_directory.py index 88290907ef..fca098f4d4 100644 --- a/docs/iris/src/whatsnew/aggregate_directory.py +++ b/docs/iris/src/whatsnew/aggregate_directory.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2015 - 2016, Met Office +# (C) British Crown Copyright 2015 - 2019, Met Office # # This file is part of Iris. 
# @@ -166,13 +166,16 @@ def find_release_directory(root_directory, release=None, return result -def generate_header(release): +def generate_header(release, unreleased=False): '''Return a list of text lines that make up a header for the document.''' + if unreleased: + isodatestamp = '' + else: + isodatestamp = datetime.date.today().strftime('%Y-%m-%d') header_text = [] title_template = 'What\'s New in {} {!s}\n' title_line = title_template.format(SOFTWARE_NAME, release) title_underline = ('*' * (len(title_line) - 1)) + '\n' - isodatestamp = datetime.date.today().strftime('%Y-%m-%d') header_text.append(title_line) header_text.append(title_underline) header_text.append('\n') @@ -215,11 +218,13 @@ def read_directory(directory_path): return compilable_files -def compile_directory(directory, release): +def compile_directory(directory, release, unreleased=False): '''Read in source files in date order and compile the text into a list.''' + if unreleased: + release = '' source_text = read_directory(directory) compiled_text = [] - header_text = generate_header(release) + header_text = generate_header(release, unreleased) compiled_text.extend(header_text) for count, category in enumerate(VALID_CATEGORIES): category_text = [] @@ -242,11 +247,12 @@ def compile_directory(directory, release): if not text[-1].endswith('\n'): text[-1] += '\n' category_text.extend(text) + category_text.append('\n----\n\n') compiled_text.extend(category_text) return compiled_text -def check_all_contributions_valid(release=None, quiet=False): +def check_all_contributions_valid(release=None, quiet=False, unreleased=False): """"Scan the contributions directory for badly-named files.""" root_directory = _self_root_directory() # Check there are *some* contributions directory(s), else silently pass. @@ -263,12 +269,12 @@ def check_all_contributions_valid(release=None, quiet=False): # Run the directory scan, but convert any warning into an error. 
with warnings.catch_warnings(): warnings.simplefilter('error') - compile_directory(release_directory, release) + compile_directory(release_directory, release, unreleased) if not quiet: print('done.') -def run_compilation(release=None, quiet=False): +def run_compilation(release=None, quiet=False, unreleased=False): '''Write a draft release.rst file given a specified uncompiled release.''' if release is None: # This must exist ! @@ -278,8 +284,11 @@ def run_compilation(release=None, quiet=False): print(msg.format(release)) root_directory = _self_root_directory() release_directory = find_release_directory(root_directory, release) - compiled_text = compile_directory(release_directory, release) - compiled_filename = str(release) + EXTENSION + compiled_text = compile_directory(release_directory, release, unreleased) + if unreleased: + compiled_filename = 'latest' + EXTENSION + else: + compiled_filename = str(release) + EXTENSION compiled_filepath = os.path.join(root_directory, compiled_filename) with open(compiled_filepath, 'w') as output_object: for string_line in compiled_text: @@ -295,13 +304,19 @@ def run_compilation(release=None, quiet=False): PARSER.add_argument( '-c', '--checkonly', action='store_true', help="Check contribution file names, do not build.") + PARSER.add_argument( + '-u', '--unreleased', action='store_true', + help=("Label the release version as '', " + "and its date as ''.")) PARSER.add_argument( '-q', '--quiet', action='store_true', help="Do not print progress messages.") ARGUMENTS = PARSER.parse_args() release = ARGUMENTS.release + unreleased = ARGUMENTS.unreleased quiet = ARGUMENTS.quiet if ARGUMENTS.checkonly: - check_all_contributions_valid(release, quiet=quiet) + check_all_contributions_valid(release, quiet=quiet, + unreleased=unreleased) else: - run_compilation(release, quiet=quiet) + run_compilation(release, quiet=quiet, unreleased=unreleased) diff --git 
a/docs/iris/src/whatsnew/contributions_2.3.0/bugfix_2018-Sep-17_aggregate_by_multdim_coords.txt b/docs/iris/src/whatsnew/contributions_2.3.0/bugfix_2018-Sep-17_aggregate_by_multdim_coords.txt deleted file mode 100644 index 4a2181ec2d..0000000000 --- a/docs/iris/src/whatsnew/contributions_2.3.0/bugfix_2018-Sep-17_aggregate_by_multdim_coords.txt +++ /dev/null @@ -1,2 +0,0 @@ -* :meth:`iris.cube.Cube.aggregated_by` now gives correct values in points and -bounds when handling multidimensional coordinates. \ No newline at end of file diff --git a/docs/iris/src/whatsnew/index.rst b/docs/iris/src/whatsnew/index.rst index c3a34303f0..179216ccb5 100644 --- a/docs/iris/src/whatsnew/index.rst +++ b/docs/iris/src/whatsnew/index.rst @@ -9,6 +9,9 @@ Iris versions. .. toctree:: :maxdepth: 2 + latest.rst + 3.0.rst + 2.3.rst 2.2.rst 2.1.rst 2.0.rst diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 29a175f31d..1a1a6dc364 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -121,7 +121,7 @@ def callback(cube, field, filename): # Iris revision. -__version__ = '2.3.0dev0' +__version__ = '3.0.dev0' # Restrict the names imported when using "from iris import *" __all__ = ['load', 'load_cube', 'load_cubes', 'load_raw', @@ -337,7 +337,9 @@ def load(uris, constraints=None, callback=None): A modifier/filter function. Returns: - An :class:`iris.cube.CubeList`. + An :class:`iris.cube.CubeList`. Note that there is no inherent order + to this :class:`iris.cube.CubeList` and it should be treated as if it + were random. 
""" return _load_collection(uris, constraints, callback).merged().cubes() @@ -370,8 +372,7 @@ def load_cube(uris, constraint=None, callback=None): if len(constraints) != 1: raise ValueError('only a single constraint is allowed') - cubes = _load_collection(uris, constraints, callback) - cubes = cubes.merged().cubes() + cubes = _load_collection(uris, constraints, callback).cubes() try: cube = cubes.merge_cube() @@ -403,7 +404,9 @@ def load_cubes(uris, constraints=None, callback=None): A modifier/filter function. Returns: - An :class:`iris.cube.CubeList`. + An :class:`iris.cube.CubeList`. Note that there is no inherent order + to this :class:`iris.cube.CubeList` and it should be treated as if it + were random. """ # Merge the incoming cubes diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 3b212bf6f2..18b7fb1f54 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2017, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -23,7 +23,10 @@ from six.moves import (filter, input, map, range, zip) # noqa import six -import collections +try: # Python 3 + from collections.abc import Iterable, Mapping +except ImportError: # Python 2.7 + from collections import Iterable, Mapping import operator import numpy as np @@ -98,7 +101,7 @@ def __init__(self, name=None, cube_func=None, coord_values=None, **kwargs): raise TypeError('cube_func must be None or callable, got %r' % cube_func) if not (coord_values is None or isinstance(coord_values, - collections.Mapping)): + Mapping)): raise TypeError('coord_values must be None or a ' 'collections.Mapping, got %r' % coord_values) @@ -258,7 +261,7 @@ def extract(self, cube): try_quick = False if callable(self._coord_thing): call_func = self._coord_thing - elif (isinstance(self._coord_thing, collections.Iterable) and + elif (isinstance(self._coord_thing, Iterable) and not isinstance(self._coord_thing, (six.string_types, iris.coords.Cell))): desired_values = list(self._coord_thing) diff --git a/lib/iris/_cube_coord_common.py b/lib/iris/_cube_coord_common.py index 9eba266ff6..6225b6f64c 100644 --- a/lib/iris/_cube_coord_common.py +++ b/lib/iris/_cube_coord_common.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -19,8 +19,7 @@ from six.moves import (filter, input, map, range, zip) # noqa import six -# TODO: Is this a mixin or a base class? - +import re import string import cf_units @@ -28,6 +27,39 @@ import iris.std_names +# https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name +_TOKEN_PARSE = re.compile(r'''^[a-zA-Z0-9][\w\.\+\-@]*$''') + + +def get_valid_standard_name(name): + # Standard names are optionally followed by a standard name + # modifier, separated by one or more blank spaces + + if name is not None: + name_is_valid = False + # Supported standard name modifiers. Ref: [CF] Appendix C. 
+ valid_std_name_modifiers = ['detection_minimum', + 'number_of_observations', + 'standard_error', + 'status_flag'] + + valid_name_pattern = re.compile(r'''^([a-zA-Z_]+)( *)([a-zA-Z_]*)$''') + name_groups = valid_name_pattern.match(name) + + if name_groups: + std_name, whitespace, std_name_modifier = name_groups.groups() + if (std_name in iris.std_names.STD_NAMES) and ( + bool(whitespace) == (std_name_modifier in + valid_std_name_modifiers)): + name_is_valid = True + + if name_is_valid is False: + raise ValueError('{!r} is not a valid standard_name'.format( + name)) + + return name + + class LimitedAttributeDict(dict): _forbidden_keys = ('standard_name', 'long_name', 'units', 'bounds', 'axis', 'calendar', 'leap_month', 'leap_year', 'month_lengths', @@ -84,7 +116,30 @@ def update(self, other, **kwargs): class CFVariableMixin(object): - def name(self, default='unknown'): + + _DEFAULT_NAME = 'unknown' # the name default string + + @staticmethod + def token(name): + ''' + Determine whether the provided name is a valid NetCDF name and thus + safe to represent a single parsable token. + + Args: + + * name: + The string name to verify + + Returns: + The provided name if valid, otherwise None. + + ''' + if name is not None: + result = _TOKEN_PARSE.match(name) + name = result if result is None else name + return name + + def name(self, default=None, token=False): """ Returns a human-readable name. @@ -92,9 +147,35 @@ def name(self, default='unknown'): 'var_name', then the STASH attribute before falling back to the value of `default` (which itself defaults to 'unknown'). + Kwargs: + + * default: + The value of the default name. + * token: + If true, ensure that the name returned satisfies the criteria for + the characters required by a valid NetCDF name. If it is not + possible to return a valid name, then a ValueError exception is + raised. + + Returns: + String. 
+ """ - return self.standard_name or self.long_name or self.var_name or \ - str(self.attributes.get('STASH', '')) or default + def _check(item): + return self.token(item) if token else item + + default = self._DEFAULT_NAME if default is None else default + + result = (_check(self.standard_name) or _check(self.long_name) or + _check(self.var_name) or + _check(str(self.attributes.get('STASH', ''))) or + _check(default)) + + if token and result is None: + emsg = 'Cannot retrieve a valid name token from {!r}' + raise ValueError(emsg.format(self)) + + return result def rename(self, name): """ @@ -122,10 +203,7 @@ def standard_name(self): @standard_name.setter def standard_name(self, name): - if name is None or name in iris.std_names.STD_NAMES: - self._standard_name = name - else: - raise ValueError('%r is not a valid standard_name' % name) + self._standard_name = get_valid_standard_name(name) @property def units(self): @@ -144,12 +222,10 @@ def var_name(self): @var_name.setter def var_name(self, name): if name is not None: - if not name: - raise ValueError('An empty string is not a valid netCDF ' - 'variable name.') - elif set(name).intersection(string.whitespace): - raise ValueError('{!r} is not a valid netCDF variable name ' - 'as it contains whitespace.'.format(name)) + result = self.token(name) + if result is None or not name: + emsg = '{!r} is not a valid NetCDF variable name.' + raise ValueError(emsg.format(name)) self._var_name = name @property diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index f5312b7d3a..2de93824d8 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2017 - 2018, Met Office +# (C) British Crown Copyright 2017 - 2019, Met Office # # This file is part of Iris. 
# @@ -27,8 +27,9 @@ import dask import dask.array as da -import dask.context -from dask.local import get_sync as dget_sync +import dask.config +import dask.utils + import numpy as np import numpy.ma as ma @@ -58,26 +59,104 @@ def is_lazy_data(data): return result -# A magic value, chosen to minimise chunk creation time and chunk processing -# time within dask. -_MAX_CHUNK_SIZE = 8 * 1024 * 1024 * 2 +def _optimum_chunksize(chunks, shape, + limit=None, + dtype=np.dtype('f4')): + """ + Reduce or increase an initial chunk shape to get close to a chosen ideal + size, while prioritising the splitting of the earlier (outer) dimensions + and keeping intact the later (inner) ones. + Args: -def _limited_shape(shape): - # Reduce a shape to less than a default overall number-of-points, reducing - # earlier dimensions preferentially. - # Note: this is only a heuristic, assuming that earlier dimensions are - # 'outer' storage dimensions -- not *always* true, even for NetCDF data. - shape = list(shape) - i_reduce = 0 - while np.prod(shape) > _MAX_CHUNK_SIZE: - factor = np.ceil(np.prod(shape) / _MAX_CHUNK_SIZE) - new_dim = int(shape[i_reduce] / factor) - if new_dim < 1: - new_dim = 1 - shape[i_reduce] = new_dim - i_reduce += 1 - return tuple(shape) + * chunks (tuple of int, or None): + Pre-existing chunk shape of the target data : None if unknown. + * shape (tuple of int): + The full array shape of the target data. + * limit (int): + The 'ideal' target chunk size, in bytes. Default from dask.config. + * dtype (np.dtype): + Numpy dtype of target data. + + Returns: + * chunk (tuple of int): + The proposed shape of one full chunk. + + .. note:: + The purpose of this is very similar to + `dask.array.core.normalize_chunks`, when called as + `(chunks='auto', shape, dtype=dtype, previous_chunks=chunks, ...)`. + Except, the operation here is optimised specifically for a 'c-like' + dimension order, i.e. outer dimensions first, as for netcdf variables. 
+ So if, in future, this policy can be implemented in dask, then we would + prefer to replace this function with a call to that one. + Accordingly, the arguments roughly match 'normalize_chunks', except + that we don't support the alternative argument forms of that routine. + The return value, however, is a single 'full chunk', rather than a + complete chunking scheme : so an equivalent code usage could be + "chunks = [c[0] for c in normalise_chunks('auto', ...)]". + + """ + # Set the chunksize limit. + if limit is None: + # Fetch the default 'optimal' chunksize from the dask config. + limit = dask.config.get('array.chunk-size') + # Convert to bytes + limit = dask.utils.parse_bytes(limit) + + point_size_limit = limit / dtype.itemsize + + # Create result chunks, starting with a copy of the input. + result = list(chunks) + + if np.prod(result) < point_size_limit: + # If size is less than maximum, expand the chunks, multiplying later + # (i.e. inner) dims first. + i_expand = len(shape) - 1 + while np.prod(result) < point_size_limit and i_expand >= 0: + factor = np.floor(point_size_limit * 1.0 / np.prod(result)) + new_dim = result[i_expand] * int(factor) + if new_dim >= shape[i_expand]: + # Clip to dim size : chunk dims must not exceed the full shape. + new_dim = shape[i_expand] + else: + # 'new_dim' is less than the relevant dim of 'shape' -- but it + # is also the largest possible multiple of the input-chunks, + # within the size limit. + # So : 'i_expand' is the outer (last) dimension over which we + # will multiply the input chunks, and 'new_dim' is a value that + # ensures the fewest possible chunks within that dim. + + # Now replace 'new_dim' with the value **closest to equal-size + # chunks**, for the same (minimum) number of chunks. + # More-equal chunks are practically better. + # E.G. : "divide 8 into multiples of 2, with a limit of 7", + # produces new_dim=6, which would mean chunks of sizes (6, 2). + # But (4, 4) is clearly better for memory and time cost. 
+ + # Calculate how many (expanded) chunks fit into this dimension. + dim_chunks = np.ceil(shape[i_expand] * 1. / new_dim) + # Get "ideal" (equal) size for that many chunks. + ideal_equal_chunk_size = shape[i_expand] / dim_chunks + # Use the nearest whole multiple of input chunks >= ideal. + new_dim = int(result[i_expand] * + np.ceil(ideal_equal_chunk_size / + result[i_expand])) + + result[i_expand] = new_dim + i_expand -= 1 + else: + # Similarly, reduce if too big, reducing earlier (outer) dims first. + i_reduce = 0 + while np.prod(result) > point_size_limit: + factor = np.ceil(np.prod(result) / point_size_limit) + new_dim = int(result[i_reduce] / factor) + if new_dim < 1: + new_dim = 1 + result[i_reduce] = new_dim + i_reduce += 1 + + return tuple(result) def as_lazy_data(data, chunks=None, asarray=False): @@ -86,29 +165,41 @@ def as_lazy_data(data, chunks=None, asarray=False): Args: - * data: - An array. This will be converted to a dask array. + * data (array-like): + An indexable object with 'shape', 'dtype' and 'ndim' properties. + This will be converted to a dask array. Kwargs: - * chunks: - Describes how the created dask array should be split up. Defaults to a - value first defined in biggus (being `8 * 1024 * 1024 * 2`). - For more information see - http://dask.pydata.org/en/latest/array-creation.html#chunks. + * chunks (list of int): + If present, a source chunk shape, e.g. for a chunked netcdf variable. - * asarray: + * asarray (bool): If True, then chunks will be converted to instances of `ndarray`. Set to False (default) to pass passed chunks through unchanged. Returns: The input array converted to a dask array. + .. note:: + The result chunk size is a multiple of 'chunks', if given, up to the + dask default chunksize, i.e. `dask.config.get('array.chunk-size'), + or the full data shape if that is smaller. + If 'chunks' is not given, the result has chunks of the full data shape, + but reduced by a factor if that exceeds the dask default chunksize. 
+ """ if chunks is None: - # Default to the shape of the wrapped array-like, - # but reduce it if larger than a default maximum size. - chunks = _limited_shape(data.shape) + # No existing chunks : Make a chunk the shape of the entire input array + # (but we will subdivide it if too big). + chunks = list(data.shape) + + # Adjust chunk size for better dask performance, + # NOTE: but only if no shape dimension is zero, so that we can handle the + # PPDataProxy of "raw" landsea-masked fields, which have a shape of (0, 0). + if all(elem > 0 for elem in data.shape): + # Expand or reduce the basic chunk shape to an optimum size. + chunks = _optimum_chunksize(chunks, shape=data.shape, dtype=data.dtype) if isinstance(data, ma.core.MaskedConstant): data = ma.masked_array(data.data, mask=data.mask) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 0a5265e9ab..9613550929 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -50,7 +50,11 @@ from six.moves import (filter, input, map, range, zip) # noqa import six -import collections +from collections import OrderedDict +try: # Python 3 + from collections.abc import Iterable +except ImportError: # Python 2.7 + from collections import Iterable from functools import wraps import dask.array as da @@ -62,7 +66,7 @@ from iris.analysis._area_weighted import AreaWeightedRegridder from iris.analysis._interpolation import (EXTRAPOLATION_MODES, RectilinearInterpolator) -from iris.analysis._regrid import RectilinearRegridder +from iris.analysis._regrid import RectilinearRegridder, CurvilinearRegridder import iris.coords from iris.exceptions import LazyAggregatorError import iris._lazy_data @@ -715,7 +719,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): names = [coord.name() for coord in coords] coord_name = '{}_over_{}'.format(self.name(), '_'.join(names)) - if not isinstance(points, collections.Iterable): + if not isinstance(points, Iterable): points = 
[points] # Decorate a collapsed cube with a scalar additive coordinate @@ -764,7 +768,7 @@ def aggregate_shape(self, **kwargs): points = kwargs[self._args[0]] shape = () - if not isinstance(points, collections.Iterable): + if not isinstance(points, Iterable): points = [points] points = np.array(points) @@ -1089,7 +1093,7 @@ def _percentile(data, axis, percent, fast_percentile_method=False, # Ensure to unflatten any leading dimensions. if shape: - if not isinstance(percent, collections.Iterable): + if not isinstance(percent, Iterable): percent = [percent] percent = np.array(percent) # Account for the additive dimension. @@ -1207,7 +1211,7 @@ def _weighted_percentile(data, axis, weights, percent, returned=False, # Ensure to unflatten any leading dimensions. if shape: - if not isinstance(percent, collections.Iterable): + if not isinstance(percent, Iterable): percent = [percent] percent = np.array(percent) # Account for the additive dimension. @@ -1486,7 +1490,7 @@ def interp_order(length): MEAN = WeightedAggregator('mean', ma.average, - lazy_func=_build_dask_mdtol_function(da.mean)) + lazy_func=_build_dask_mdtol_function(da.ma.average)) """ An :class:`~iris.analysis.Aggregator` instance that calculates the mean over a :class:`~iris.cube.Cube`, as computed by @@ -1526,7 +1530,7 @@ def interp_order(length): .. note:: - Lazy operation is supported, via :func:`dask.array.nanmean`. + Lazy operation is supported, via :func:`dask.array.ma.average`. This aggregator handles masked data. @@ -1877,10 +1881,10 @@ def __init__(self, groupby_coords, shared_coords=None): self.coords = [] self._groupby_coords = [] self._shared_coords = [] - self._slices_by_key = collections.OrderedDict() + self._slices_by_key = OrderedDict() self._stop = None # Ensure group-by coordinates are iterable. 
- if not isinstance(groupby_coords, collections.Iterable): + if not isinstance(groupby_coords, Iterable): raise TypeError('groupby_coords must be a ' '`collections.Iterable` type.') @@ -1891,7 +1895,7 @@ def __init__(self, groupby_coords, shared_coords=None): # coordinates. if shared_coords is not None: # Ensure shared coordinates are iterable. - if not isinstance(shared_coords, collections.Iterable): + if not isinstance(shared_coords, Iterable): raise TypeError('shared_coords must be a ' '`collections.Iterable` type.') # Add valid shared coordinates. @@ -2556,3 +2560,67 @@ def regridder(self, src_cube, target_grid): from iris.analysis.trajectory import \ UnstructuredNearestNeigbourRegridder return UnstructuredNearestNeigbourRegridder(src_cube, target_grid) + + +class PointInCell(object): + """ + This class describes the point-in-cell regridding scheme for use + typically with :meth:`iris.cube.Cube.regrid()`. + + The PointInCell regridder can regrid data from a source grid of any + dimensionality and in any coordinate system. + The location of each source point is specified by X and Y coordinates + mapped over the same cube dimensions, aka "grid dimensions" : the grid may + have any dimensionality. The X and Y coordinates must also have the same, + defined coord_system. + The weights, if specified, must have the same shape as the X and Y + coordinates. + The output grid can be any 'normal' XY grid, specified by *separate* X + and Y coordinates : That is, X and Y have two different cube dimensions. + The output X and Y coordinates must also have a common, specified + coord_system. + + """ + def __init__(self, weights=None): + """ + Point-in-cell regridding scheme suitable for regridding over one + or more orthogonal coordinates. + + Optional Args: + + * weights: + A :class:`numpy.ndarray` instance that defines the weights + for the grid cells of the source grid. Must have the same shape + as the data of the source grid. + If unspecified, equal weighting is assumed. 
+ + """ + self.weights = weights + + def regridder(self, src_grid, target_grid): + """ + Creates a point-in-cell regridder to perform regridding from the + source grid to the target grid. + + Typically you should use :meth:`iris.cube.Cube.regrid` for + regridding a cube. There are, however, some situations when + constructing your own regridder is preferable. These are detailed in + the :ref:`user guide `. + + Args: + + * src_grid: + The :class:`~iris.cube.Cube` defining the source grid. + * target_grid: + The :class:`~iris.cube.Cube` defining the target grid. + + Returns: + A callable with the interface: + + `callable(cube)` + + where `cube` is a cube with the same grid as `src_grid` + that is to be regridded to the `target_grid`. + + """ + return CurvilinearRegridder(src_grid, target_grid, self.weights) diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 964f56d4f7..fb6c3ce8d0 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -391,7 +391,7 @@ def _resample_coord(self, sample_points, coord, coord_dims): # after the resampling. try: new_coord = coord.copy(new_points) - except ValueError: + except (ValueError, TypeError): aux_coord = AuxCoord.from_coord(coord) new_coord = aux_coord.copy(new_points) return new_coord diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 83561a2b94..f80d797642 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2014 - 2018, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. # @@ -33,6 +33,434 @@ import iris.cube from iris.util import _meshgrid +from scipy.sparse import csc_matrix, diags as sparse_diags + + +def _transform_xy_arrays(crs_from, x, y, crs_to): + """ + Transform 2d points between cartopy coordinate reference systems. + + NOTE: copied private function from iris.analysis.cartography. 
+ + Args: + + * crs_from, crs_to (:class:`cartopy.crs.Projection`): + The coordinate reference systems. + * x, y (arrays): + point locations defined in 'crs_from'. + + Returns: + x, y : Arrays of locations defined in 'crs_to'. + + """ + pts = crs_to.transform_points(crs_from, x, y) + return pts[..., 0], pts[..., 1] + + +def _regrid_weighted_curvilinear_to_rectilinear__prepare( + src_cube, weights, grid_cube): + """ + First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'. + + Check inputs and calculate the sparse regrid matrix and related info. + The 'regrid info' returned can be re-used over many 2d slices. + + """ + if src_cube.aux_factories: + msg = 'All source cube derived coordinates will be ignored.' + warnings.warn(msg) + + # Get the source cube x and y 2D auxiliary coordinates. + sx, sy = src_cube.coord(axis='x'), src_cube.coord(axis='y') + # Get the target grid cube x and y dimension coordinates. + tx, ty = get_xy_dim_coords(grid_cube) + + if sx.units != sy.units: + msg = 'The source cube x ({!r}) and y ({!r}) coordinates must ' \ + 'have the same units.' + raise ValueError(msg.format(sx.name(), sy.name())) + + if src_cube.coord_dims(sx) != src_cube.coord_dims(sy): + msg = 'The source cube x ({!r}) and y ({!r}) coordinates must ' \ + 'map onto the same cube dimensions.' + raise ValueError(msg.format(sx.name(), sy.name())) + + if sx.coord_system != sy.coord_system: + msg = 'The source cube x ({!r}) and y ({!r}) coordinates must ' \ + 'have the same coordinate system.' + raise ValueError(msg.format(sx.name(), sy.name())) + + if sx.coord_system is None: + msg = ('The source X and Y coordinates must have a defined ' + 'coordinate system.') + raise ValueError(msg) + + if tx.units != ty.units: + msg = 'The target grid cube x ({!r}) and y ({!r}) coordinates must ' \ + 'have the same units.' 
+ raise ValueError(msg.format(tx.name(), ty.name())) + + if tx.coord_system is None: + msg = ('The target X and Y coordinates must have a defined ' + 'coordinate system.') + raise ValueError(msg) + + if tx.coord_system != ty.coord_system: + msg = 'The target grid cube x ({!r}) and y ({!r}) coordinates must ' \ + 'have the same coordinate system.' + raise ValueError(msg.format(tx.name(), ty.name())) + + if weights is None: + weights = np.ones(sx.shape) + if weights.shape != sx.shape: + msg = ('Provided weights must have the same shape as the X and Y ' + 'coordinates.') + raise ValueError(msg) + + if not tx.has_bounds() or not tx.is_contiguous(): + msg = 'The target grid cube x ({!r})coordinate requires ' \ + 'contiguous bounds.' + raise ValueError(msg.format(tx.name())) + + if not ty.has_bounds() or not ty.is_contiguous(): + msg = 'The target grid cube y ({!r}) coordinate requires ' \ + 'contiguous bounds.' + raise ValueError(msg.format(ty.name())) + + def _src_align_and_flatten(coord): + # Return a flattened, unmasked copy of a coordinate's points array that + # will align with a flattened version of the source cube's data. + # + # PP-TODO: Should work with any cube dimensions for X and Y coords. + # Probably needs fixing anyway? + # + points = coord.points + if src_cube.coord_dims(coord) == (1, 0): + points = points.T + if points.shape != src_cube.shape: + msg = 'The shape of the points array of {!r} is not compatible ' \ + 'with the shape of {!r}.' + raise ValueError(msg.format(coord.name(), src_cube.name())) + return np.asarray(points.flatten()) + + # Align and flatten the coordinate points of the source space. + sx_points = _src_align_and_flatten(sx) + sy_points = _src_align_and_flatten(sy) + + # Transform source X and Y points into the target coord-system, if needed. 
+ if sx.coord_system != tx.coord_system: + src_crs = sx.coord_system.as_cartopy_projection() + tgt_crs = tx.coord_system.as_cartopy_projection() + sx_points, sy_points = _transform_xy_arrays( + src_crs, sx_points, sy_points, tgt_crs) + # + # TODO: how does this work with scaled units ?? + # e.g. if crs is latlon, units could be degrees OR radians ? + # + + # Wrap modular values (e.g. longitudes) if required. + modulus = sx.units.modulus + if modulus is not None: + # Match the source cube x coordinate range to the target grid + # cube x coordinate range. + min_sx, min_tx = np.min(sx.points), np.min(tx.points) + if min_sx < 0 and min_tx >= 0: + indices = np.where(sx_points < 0) + # Ensure += doesn't raise a TypeError + if not np.can_cast(modulus, sx_points.dtype): + sx_points = sx_points.astype(type(modulus), casting='safe') + sx_points[indices] += modulus + elif min_sx >= 0 and min_tx < 0: + indices = np.where(sx_points > (modulus / 2)) + # Ensure -= doesn't raise a TypeError + if not np.can_cast(modulus, sx_points.dtype): + sx_points = sx_points.astype(type(modulus), casting='safe') + sx_points[indices] -= modulus + + # Create target grid cube x and y cell boundaries. + tx_depth, ty_depth = tx.points.size, ty.points.size + tx_dim, = grid_cube.coord_dims(tx) + ty_dim, = grid_cube.coord_dims(ty) + + tx_cells = np.concatenate((tx.bounds[:, 0], + tx.bounds[-1, 1].reshape(1))) + ty_cells = np.concatenate((ty.bounds[:, 0], + ty.bounds[-1, 1].reshape(1))) + + # Determine the target grid cube x and y cells that bound + # the source cube x and y points. + + def _regrid_indices(cells, depth, points): + # Calculate the minimum difference in cell extent. + extent = np.min(np.diff(cells)) + if extent == 0: + # Detected an dimension coordinate with an invalid + # zero length cell extent. + msg = 'The target grid cube {} ({!r}) coordinate contains ' \ + 'a zero length cell extent.' 
+ axis, name = 'x', tx.name() + if points is sy_points: + axis, name = 'y', ty.name() + raise ValueError(msg.format(axis, name)) + elif extent > 0: + # The cells of the dimension coordinate are in ascending order. + indices = np.searchsorted(cells, points, side='right') - 1 + else: + # The cells of the dimension coordinate are in descending order. + # np.searchsorted() requires ascending order, so we require to + # account for this restriction. + cells = cells[::-1] + right = np.searchsorted(cells, points, side='right') + left = np.searchsorted(cells, points, side='left') + indices = depth - right + # Only those points that exactly match the left-hand cell bound + # will differ between 'left' and 'right'. Thus their appropriate + # target cell location requires to be recalculated to give the + # correct descending [upper, lower) interval cell, source to target + # regrid behaviour. + delta = np.where(left != right)[0] + if delta.size: + indices[delta] = depth - left[delta] + return indices + + x_indices = _regrid_indices(tx_cells, tx_depth, sx_points) + y_indices = _regrid_indices(ty_cells, ty_depth, sy_points) + + # Now construct a sparse M x N matix, where M is the flattened target + # space, and N is the flattened source space. The sparse matrix will then + # be populated with those source cube points that contribute to a specific + # target cube cell. + + # Determine the valid indices and their offsets in M x N space. + # Calculate the valid M offsets. + cols = np.where((y_indices >= 0) & (y_indices < ty_depth) & + (x_indices >= 0) & (x_indices < tx_depth))[0] + + # Reduce the indices to only those that are valid. + x_indices = x_indices[cols] + y_indices = y_indices[cols] + + # Calculate the valid N offsets. + if ty_dim < tx_dim: + rows = y_indices * tx.points.size + x_indices + else: + rows = x_indices * ty.points.size + y_indices + + # Calculate the associated valid weights. 
+ weights_flat = weights.flatten() + data = weights_flat[cols] + + # Build our sparse M x N matrix of weights. + sparse_matrix = csc_matrix((data, (rows, cols)), + shape=(grid_cube.data.size, src_cube.data.size)) + + # Performing a sparse sum to collapse the matrix to (M, 1). + sum_weights = sparse_matrix.sum(axis=1).getA() + + # Determine the rows (flattened target indices) that have a + # contribution from one or more source points. + rows = np.nonzero(sum_weights) + + # NOTE: when source points are masked, this 'sum_weights' is possibly + # incorrect and needs re-calculating. Likewise 'rows' may cover target + # cells which happen to get no data. This is dealt with by adjusting as + # required in the '__perform' function, below. + + regrid_info = (sparse_matrix, sum_weights, rows, grid_cube) + return regrid_info + + +def _regrid_weighted_curvilinear_to_rectilinear__perform( + src_cube, regrid_info): + """ + Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. + + Perform the prepared regrid calculation on a single 2d cube. + + """ + sparse_matrix, sum_weights, rows, grid_cube = regrid_info + + # Calculate the numerator of the weighted mean (M, 1). + is_masked = ma.isMaskedArray(src_cube.data) + if not is_masked: + data = src_cube.data + else: + # Use raw data array + data = src_cube.data.data + # Check if there are any masked source points to take account of. + is_masked = np.ma.is_masked(src_cube.data) + if is_masked: + # Zero any masked source points so they add nothing in output sums. + mask = src_cube.data.mask + data[mask] = 0.0 + # Calculate a new 'sum_weights' to allow for missing source points. + # N.B. it is more efficient to use the original once-calculated + # sparse matrix, but in this case we can't. + # Hopefully, this post-multiplying by the validities is less costly + # than repeating the whole sparse calculation. 
+ valid_src_cells = ~mask.flat[:] + src_cell_validity_factors = sparse_diags( + np.array(valid_src_cells, dtype=int), + 0) + valid_weights = sparse_matrix * src_cell_validity_factors + sum_weights = valid_weights.sum(axis=1).getA() + # Work out where output cells are missing all contributions. + # This allows for where 'rows' contains output cells that have no + # data because of missing input points. + zero_sums = sum_weights == 0.0 + # Make sure we can still divide by sum_weights[rows]. + sum_weights[zero_sums] = 1.0 + + # Calculate sum in each target cell, over contributions from each source + # cell. + numerator = sparse_matrix * data.reshape(-1, 1) + + # Create a template for the weighted mean result. + weighted_mean = ma.masked_all(numerator.shape, dtype=numerator.dtype) + + # Calculate final results in all relevant places. + weighted_mean[rows] = numerator[rows] / sum_weights[rows] + if is_masked: + # Ensure masked points where relevant source cells were all missing. + if np.any(zero_sums): + # Make masked if it wasn't. + weighted_mean = np.ma.asarray(weighted_mean) + # Mask where contributing sums were zero. + weighted_mean[zero_sums] = np.ma.masked + + # Construct the final regridded weighted mean cube. + tx = grid_cube.coord(axis='x', dim_coords=True) + ty = grid_cube.coord(axis='y', dim_coords=True) + tx_dim, = grid_cube.coord_dims(tx) + ty_dim, = grid_cube.coord_dims(ty) + dim_coords_and_dims = list(zip((ty.copy(), tx.copy()), (ty_dim, tx_dim))) + cube = iris.cube.Cube(weighted_mean.reshape(grid_cube.shape), + dim_coords_and_dims=dim_coords_and_dims) + cube.metadata = copy.deepcopy(src_cube.metadata) + + for coord in src_cube.coords(dimensions=()): + cube.add_aux_coord(coord.copy()) + + return cube + + +class CurvilinearRegridder(object): + """ + This class provides support for performing point-in-cell regridding + between a curvilinear source grid and a rectilinear target grid. 
+
+    """
+    def __init__(self, src_grid_cube, target_grid_cube, weights=None):
+        """
+        Create a regridder for conversions between the source
+        and target grids.
+
+        Args:
+
+        * src_grid_cube:
+            The :class:`~iris.cube.Cube` providing the source grid.
+        * tgt_grid_cube:
+            The :class:`~iris.cube.Cube` providing the target grid.
+
+        Optional Args:
+
+        * weights:
+            A :class:`numpy.ndarray` instance that defines the weights
+            for the grid cells of the source grid. Must have the same shape
+            as the data of the source grid.
+            If unspecified, equal weighting is assumed.
+
+        """
+        # Validity checks.
+        if not isinstance(src_grid_cube, iris.cube.Cube):
+            raise TypeError("'src_grid_cube' must be a Cube")
+        if not isinstance(target_grid_cube, iris.cube.Cube):
+            raise TypeError("'target_grid_cube' must be a Cube")
+        # Snapshot the state of the cubes to ensure that the regridder
+        # is impervious to external changes to the original source cubes.
+        self._src_cube = src_grid_cube.copy()
+        self._target_cube = target_grid_cube.copy()
+        self.weights = weights
+        self._regrid_info = None
+
+    @staticmethod
+    def _get_horizontal_coord(cube, axis):
+        """
+        Gets the horizontal coordinate on the supplied cube along the
+        specified axis.
+
+        Args:
+
+        * cube:
+            An instance of :class:`iris.cube.Cube`.
+        * axis:
+            Locate coordinates on `cube` along this axis.
+
+        Returns:
+            The horizontal coordinate on the specified axis of the supplied
+            cube.
+
+        """
+        coords = cube.coords(axis=axis, dim_coords=False)
+        if len(coords) != 1:
+            raise ValueError('Cube {!r} must contain a single 1D {} '
+                             'coordinate.'.format(cube.name(), axis))
+        return coords[0]
+
+    def __call__(self, src):
+        """
+        Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of
+        this :class:`_CurvilinearRegridder`.
+
+        The given cube must be defined with the same grid as the source
+        grid used to create this :class:`_CurvilinearRegridder`.
+
+        Args:
+
+        * src:
+            A :class:`~iris.cube.Cube` to be regridded.
+ + Returns: + A cube defined with the horizontal dimensions of the target + and the other dimensions from this cube. The data values of + this cube will be converted to values on the new grid using + point-in-cell regridding. + + """ + # Validity checks. + if not isinstance(src, iris.cube.Cube): + raise TypeError("'src' must be a Cube") + + gx = self._get_horizontal_coord(self._src_cube, 'x') + gy = self._get_horizontal_coord(self._src_cube, 'y') + src_grid = (gx.copy(), gy.copy()) + sx = self._get_horizontal_coord(src, 'x') + sy = self._get_horizontal_coord(src, 'y') + if (sx, sy) != src_grid: + raise ValueError('The given cube is not defined on the same ' + 'source grid as this regridder.') + + # Call the regridder function. + # This includes repeating over any non-XY dimensions, because the + # underlying routine does not support this. + # FOR NOW: we will use cube.slices and merge to achieve this, + # though that is not a terribly efficient method ... + # TODO: create a template result cube and paste data slices into it, + # which would be more efficient. + result_slices = iris.cube.CubeList([]) + for slice_cube in src.slices(sx): + if self._regrid_info is None: + # Calculate the basic regrid info just once. 
+                self._regrid_info = \
+                    _regrid_weighted_curvilinear_to_rectilinear__prepare(
+                        slice_cube, self.weights, self._target_cube)
+            slice_result = \
+                _regrid_weighted_curvilinear_to_rectilinear__perform(
+                    slice_cube, self._regrid_info)
+            result_slices.append(slice_result)
+        result = result_slices.merge_cube()
+        return result
+

 class RectilinearRegridder(object):
     """
diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py
index a603adef98..fb9f1f31e1 100644
--- a/lib/iris/analysis/calculus.py
+++ b/lib/iris/analysis/calculus.py
@@ -26,11 +26,11 @@
 import six

 import re
+import warnings

 import cf_units
 import numpy as np

-from iris._deprecation import warn_deprecated
 import iris.cube
 import iris.coords
 import iris.coord_systems
@@ -85,14 +85,16 @@ def _construct_midpoint_coord(coord, circular=None):

     """
     if circular and not hasattr(coord, 'circular'):
-        raise ValueError('Cannot produce circular midpoint from a coord '
-                         'without the circular attribute')
+        msg = ("Cannot produce a circular midpoint for the '{}' coord, "
+               "which does not have a 'circular' attribute.")
+        raise ValueError(msg.format(coord.name()))

     if circular is None:
         circular = getattr(coord, 'circular', False)
     elif circular != getattr(coord, 'circular', False):
-        warn_deprecated('circular flag and Coord.circular attribute do '
-                        'not match')
+        msg = ("Constructing midpoints for the '{}' coordinate with "
+               "circular={}, though it has the attribute 'circular'={}.")
+        warnings.warn(msg.format(coord.name(), circular, coord.circular))

     if coord.ndim != 1:
         raise iris.exceptions.CoordinateMultiDimError(coord)
diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py
index abd6e3a633..7440cb1a31 100644
--- a/lib/iris/analysis/maths.py
+++ b/lib/iris/analysis/maths.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
+# (C) British Crown Copyright 2010 - 2019, Met Office
 #
 # This file is part of Iris.
# @@ -22,10 +22,11 @@ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa -import warnings +import inspect import math import operator -import inspect +import six +import warnings import cf_units import numpy as np @@ -917,9 +918,16 @@ def ws_units_func(u_cube, v_cube): if hasattr(data_func, 'nin'): self.nin = data_func.nin else: - (args, varargs, keywords, defaults) = inspect.getargspec(data_func) - self.nin = len(args) - ( - len(defaults) if defaults is not None else 0) + if six.PY2: + (args, _, _, defaults) = inspect.getargspec(data_func) + self.nin = len(args) - ( + len(defaults) if defaults is not None else 0) + else: + sig = inspect.signature(data_func) + args = [param for param in sig.parameters.values() + if (param.kind != param.KEYWORD_ONLY and + param.default is param.empty)] + self.nin = len(args) if self.nin not in [1, 2]: msg = ('{} requires {} input data arrays, the IFunc class ' diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 9654cdea0f..3eda09c624 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2015, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. # @@ -31,6 +31,8 @@ def _ones_like(cube): """ Return a copy of cube with the same mask, but all data values set to 1. + + The operation is non-lazy. """ ones_cube = cube.copy() ones_cube.data = np.ones_like(cube.data) @@ -85,6 +87,8 @@ def pearsonr(cube_a, cube_b, corr_coords=None, weights=None, mdtol=1., Reference: http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation + This operation is non-lazy. 
+ """ # Assign larger cube to cube_1 diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index d58c488afc..8e14245214 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -66,9 +66,14 @@ def dependencies(self): """ def _as_defn(self): - defn = iris.coords.CoordDefn(self.standard_name, self.long_name, - self.var_name, self.units, - self.attributes, self.coord_system) + defn = iris.coords.CoordDefn( + self.standard_name, self.long_name, + self.var_name, self.units, + self.attributes, + self.coord_system, + # Slot for Coord 'climatological' property, which this + # doesn't have. + False,) return defn @abstractmethod diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index eb8a48eb8f..eab49e36de 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -76,6 +76,15 @@ def xml_element(self, doc, attrs=None): return coord_system_xml_element + @staticmethod + def _ellipsoid_to_globe(ellipsoid, globe_default): + if ellipsoid is not None: + globe = ellipsoid.as_cartopy_globe() + else: + globe = globe_default + + return globe + @abstractmethod def as_cartopy_crs(self): """ @@ -338,9 +347,8 @@ def xml_element(self, doc): return CoordSystem.xml_element(self, doc, self._pretty_attrs()) def _ccrs_kwargs(self): - globe = None - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() + globe = self._ellipsoid_to_globe(self.ellipsoid, None) + # Cartopy v0.12 provided the new arg north_pole_grid_longitude cartopy_kwargs = {'pole_longitude': self.grid_north_pole_longitude, 'pole_latitude': self.grid_north_pole_latitude, @@ -441,10 +449,7 @@ def __repr__(self): self.ellipsoid) def as_cartopy_crs(self): - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() - else: - globe = None + globe = self._ellipsoid_to_globe(self.ellipsoid, None) return ccrs.TransverseMercator( central_longitude=self.longitude_of_central_meridian, @@ -534,10 +539,7 @@ def __repr__(self): self.ellipsoid) def as_cartopy_crs(self): - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() - else: - globe = ccrs.Globe() + globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) warnings.warn('Discarding false_easting and false_northing that are ' 'not used by Cartopy.') @@ -553,7 +555,7 @@ def as_cartopy_projection(self): class VerticalPerspective(CoordSystem): """ - An geostationary satellite image map projection. + A vertical/near-side perspective satellite image map projection. """ @@ -563,7 +565,7 @@ def __init__(self, latitude_of_projection_origin, longitude_of_projection_origin, perspective_point_height, false_easting=0, false_northing=0, ellipsoid=None): """ - Constructs an Vertical Perspective Geostationary coord system. + Constructs a Vertical Perspective coord system. 
Args: @@ -592,9 +594,6 @@ def __init__(self, latitude_of_projection_origin, #: True latitude of planar origin in degrees. self.latitude_of_projection_origin = float( latitude_of_projection_origin) - if self.latitude_of_projection_origin != 0.0: - raise ValueError('Non-zero latitude of projection currently not' - ' supported by Cartopy.') #: True longitude of planar origin in degrees. self.longitude_of_projection_origin = float( @@ -602,16 +601,13 @@ def __init__(self, latitude_of_projection_origin, #: Altitude of satellite in metres. # test if perspective_point_height may be cast to float for proj.4 - test_pph = float(perspective_point_height) - self.perspective_point_height = perspective_point_height + self.perspective_point_height = float(perspective_point_height) #: X offset from planar origin in metres. - test_fe = float(false_easting) - self.false_easting = false_easting + self.false_easting = float(false_easting) #: Y offset from planar origin in metres. - test_fn = float(false_northing) - self.false_northing = false_northing + self.false_northing = float(false_northing) #: Ellipsoid definition. 
self.ellipsoid = ellipsoid @@ -619,7 +615,7 @@ def __init__(self, latitude_of_projection_origin, def __repr__(self): return "Vertical Perspective(latitude_of_projection_origin={!r}, "\ "longitude_of_projection_origin={!r}, "\ - "perspective_point_height = {!r}, "\ + "perspective_point_height={!r}, "\ "false_easting={!r}, false_northing={!r}, "\ "ellipsoid={!r})".format(self.latitude_of_projection_origin, self.longitude_of_projection_origin, @@ -629,12 +625,10 @@ def __repr__(self): self.ellipsoid) def as_cartopy_crs(self): - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() - else: - globe = ccrs.Globe() + globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) - return ccrs.Geostationary( + return ccrs.NearsidePerspective( + central_latitude=self.latitude_of_projection_origin, central_longitude=self.longitude_of_projection_origin, satellite_height=self.perspective_point_height, false_easting=self.false_easting, @@ -645,6 +639,104 @@ def as_cartopy_projection(self): return self.as_cartopy_crs() +class Geostationary(CoordSystem): + """ + A geostationary satellite image map projection. + + """ + + grid_mapping_name = 'geostationary' + + def __init__(self, latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, sweep_angle_axis, false_easting=0, + false_northing=0, ellipsoid=None): + + """ + Constructs a Geostationary coord system. + + Args: + + * latitude_of_projection_origin (float): + True latitude of planar origin in degrees. + + * longitude_of_projection_origin (float): + True longitude of planar origin in degrees. + + * perspective_point_height (float): + Altitude of satellite in metres above the surface of the ellipsoid. + + * sweep_angle_axis (string): + The axis along which the satellite instrument sweeps - 'x' or 'y'. + + Kwargs: + + * false_easting (float): + X offset from planar origin in metres. Defaults to 0. + + * false_northing (float): + Y offset from planar origin in metres. 
Defaults to 0. + + * ellipsoid (iris.coord_systems.GeogCS): + :class:`GeogCS` defining the ellipsoid. + + """ + #: True latitude of planar origin in degrees. + self.latitude_of_projection_origin = float( + latitude_of_projection_origin) + if self.latitude_of_projection_origin != 0.0: + raise ValueError('Non-zero latitude of projection currently not' + ' supported by Cartopy.') + + #: True longitude of planar origin in degrees. + self.longitude_of_projection_origin = float( + longitude_of_projection_origin) + + #: Altitude of satellite in metres. + # test if perspective_point_height may be cast to float for proj.4 + self.perspective_point_height = float(perspective_point_height) + + #: X offset from planar origin in metres. + self.false_easting = float(false_easting) + + #: Y offset from planar origin in metres. + self.false_northing = float(false_northing) + + #: The axis along which the satellite instrument sweeps - 'x' or 'y'. + self.sweep_angle_axis = sweep_angle_axis + if self.sweep_angle_axis not in ('x', 'y'): + raise ValueError('Invalid sweep_angle_axis - must be "x" or "y"') + + #: Ellipsoid definition. 
+        self.ellipsoid = ellipsoid
+
+    def __repr__(self):
+        return "Geostationary(latitude_of_projection_origin={!r}, " \
+               "longitude_of_projection_origin={!r}, " \
+               "perspective_point_height={!r}, false_easting={!r}, " \
+               "false_northing={!r}, sweep_angle_axis={!r}, " \
+               "ellipsoid={!r})".format(self.latitude_of_projection_origin,
+                                        self.longitude_of_projection_origin,
+                                        self.perspective_point_height,
+                                        self.false_easting,
+                                        self.false_northing,
+                                        self.sweep_angle_axis, self.ellipsoid)
+
+    def as_cartopy_crs(self):
+        globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe())
+
+        return ccrs.Geostationary(
+            central_longitude=self.longitude_of_projection_origin,
+            satellite_height=self.perspective_point_height,
+            false_easting=self.false_easting,
+            false_northing=self.false_northing,
+            globe=globe,
+            sweep_axis=self.sweep_angle_axis)
+
+    def as_cartopy_projection(self):
+        return self.as_cartopy_crs()
+
+
 class Stereographic(CoordSystem):
     """
     A stereographic map projection.
@@ -712,10 +804,8 @@ def __repr__(self):
             self.ellipsoid)

     def as_cartopy_crs(self):
-        if self.ellipsoid is not None:
-            globe = self.ellipsoid.as_cartopy_globe()
-        else:
-            globe = ccrs.Globe()
+        globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe())
+
         return ccrs.Stereographic(
             self.central_lat, self.central_lon,
             self.false_easting, self.false_northing,
@@ -807,10 +897,7 @@ def as_cartopy_crs(self):
         else:
             cutoff = None

-        if self.ellipsoid is not None:
-            globe = self.ellipsoid.as_cartopy_globe()
-        else:
-            globe = ccrs.Globe()
+        globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe())

         # Cartopy v0.12 deprecated the use of secant_latitudes.
if cartopy.__version__ < '0.12': @@ -866,10 +953,7 @@ def __repr__(self): return res.format(self=self) def as_cartopy_crs(self): - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() - else: - globe = ccrs.Globe() + globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) return ccrs.Mercator( central_longitude=self.longitude_of_projection_origin, @@ -935,10 +1019,8 @@ def __repr__(self): self.ellipsoid) def as_cartopy_crs(self): - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() - else: - globe = ccrs.Globe() + globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) + return ccrs.LambertAzimuthalEqualArea( central_longitude=self.longitude_of_projection_origin, central_latitude=self.latitude_of_projection_origin, @@ -1015,10 +1097,8 @@ def __repr__(self): self.ellipsoid) def as_cartopy_crs(self): - if self.ellipsoid is not None: - globe = self.ellipsoid.as_cartopy_globe() - else: - globe = ccrs.Globe() + globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) + return ccrs.AlbersEqualArea( central_longitude=self.longitude_of_central_meridian, central_latitude=self.latitude_of_projection_origin, diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 6487b3f7e6..27c793241a 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -24,7 +24,11 @@ import six from abc import ABCMeta, abstractproperty -import collections +from collections import namedtuple +try: # Python 3 + from collections.abc import Iterator +except ImportError: # Python 2.7 + from collections import Iterator import copy from itertools import chain from six.moves import zip_longest @@ -48,10 +52,11 @@ from iris.util import points_step -class CoordDefn(collections.namedtuple('CoordDefn', - ['standard_name', 'long_name', - 'var_name', 'units', - 'attributes', 'coord_system'])): +class CoordDefn(namedtuple('CoordDefn', + ['standard_name', 'long_name', + 'var_name', 'units', + 'attributes', 'coord_system', + 'climatological'])): 
""" Criterion for identifying a specific type of :class:`DimCoord` or :class:`AuxCoord` based on its metadata. @@ -86,11 +91,11 @@ def _sort_key(defn): return _sort_key(self) < _sort_key(other) -class CoordExtent(collections.namedtuple('_CoordExtent', ['name_or_coord', - 'minimum', - 'maximum', - 'min_inclusive', - 'max_inclusive'])): +class CoordExtent(namedtuple('_CoordExtent', ['name_or_coord', + 'minimum', + 'maximum', + 'min_inclusive', + 'max_inclusive'])): """Defines a range of values for a coordinate.""" def __new__(cls, name_or_coord, minimum, maximum, @@ -139,8 +144,8 @@ def __new__(cls, name_or_coord, minimum, maximum, # Private named tuple class for coordinate groups. -_GroupbyItem = collections.namedtuple('GroupbyItem', - 'groupby_point, groupby_slice') +_GroupbyItem = namedtuple('GroupbyItem', + 'groupby_point, groupby_slice') def _get_2d_coord_bound_grid(bounds): @@ -184,7 +189,7 @@ def _get_2d_coord_bound_grid(bounds): return result -class Cell(collections.namedtuple('Cell', ['point', 'bound'])): +class Cell(namedtuple('Cell', ['point', 'bound'])): """ An immutable representation of a single cell of a coordinate, including the sample point and/or boundary position. @@ -444,8 +449,9 @@ class Coord(six.with_metaclass(ABCMeta, CFVariableMixin)): _MODE_RDIV: '/'} def __init__(self, points, standard_name=None, long_name=None, - var_name=None, units='1', bounds=None, attributes=None, - coord_system=None): + var_name=None, units='1', bounds=None, + attributes=None, coord_system=None, + climatological=False): """ Constructs a single coordinate. @@ -473,13 +479,23 @@ def __init__(self, points, standard_name=None, long_name=None, points.shape + (n,). For example, a 1d coordinate with 100 points and two bounds per cell would have a bounds array of shape (100, 2) + Note if the data is a climatology, `climatological` + should be set. * attributes A dictionary containing other cf and user-defined attributes. 
* coord_system A :class:`~iris.coord_systems.CoordSystem` representing the coordinate system of the coordinate, e.g. a :class:`~iris.coord_systems.GeogCS` for a longitude Coord. - + * climatological (bool): + When True: the coordinate is a NetCDF climatological time axis. + When True: saving in NetCDF will give the coordinate variable a + 'climatology' attribute and will create a boundary variable called + '_climatology' in place of a standard bounds + attribute and bounds variable. + Will set to True when a climatological time axis is loaded + from NetCDF. + Always False if no bounds exist. """ #: CF standard name of the quantity that the coordinate represents. self.standard_name = standard_name @@ -505,6 +521,7 @@ def __init__(self, points, standard_name=None, long_name=None, self._bounds_dm = None self.points = points self.bounds = bounds + self.climatological = climatological def __getitem__(self, keys): """ @@ -593,7 +610,8 @@ def from_coord(cls, coord): kwargs['circular'] = getattr(coord, 'circular', False) return cls(**kwargs) - def _sanitise_array(self, src, ndmin): + @staticmethod + def _sanitise_array(src, ndmin): if _lazy.is_lazy_data(src): # Lazy data : just ensure ndmin requirement. ndims_missing = ndmin - src.ndim @@ -609,17 +627,20 @@ def _sanitise_array(self, src, ndmin): result = np.require(src, requirements='W') # Ensure the array has enough dimensions. # NB. Returns the *same object* if result.ndim >= ndmin - result = np.array(result, ndmin=ndmin, copy=False) + func = ma.array if ma.isMaskedArray(result) else np.array + result = func(result, ndmin=ndmin, copy=False) # We don't need to copy the data, but we do need to have our # own view so we can control the shape, etc. 
result = result.view() return result - def _points_getter(self): + @property + def points(self): """The coordinate points values as a NumPy array.""" return self._points_dm.data.view() - def _points_setter(self, points): + @points.setter + def points(self, points): # Set the points to a new array - as long as it's the same shape. # Ensure points has an ndmin of 1 and is either a numpy or lazy array. @@ -633,9 +654,8 @@ def _points_setter(self, points): else: self._points_dm.data = points - points = property(_points_getter, _points_setter) - - def _bounds_getter(self): + @property + def bounds(self): """ The coordinate bounds values, as a NumPy array, or None if no bound values are defined. @@ -649,10 +669,12 @@ def _bounds_getter(self): bounds = self._bounds_dm.data.view() return bounds - def _bounds_setter(self, bounds): + @bounds.setter + def bounds(self, bounds): # Ensure the bounds are a compatible shape. if bounds is None: self._bounds_dm = None + self._climatological = False else: bounds = self._sanitise_array(bounds, 2) if self.shape != bounds.shape[:-1]: @@ -665,7 +687,34 @@ def _bounds_setter(self, bounds): else: self._bounds_dm.data = bounds - bounds = property(_bounds_getter, _bounds_setter) + @property + def climatological(self): + """ + A boolean that controls whether the coordinate is a climatological + time axis, in which case the bounds represent a climatological period + rather than a normal period. + + Always reads as False if there are no bounds. + On set, the input value is cast to a boolean, exceptions raised + if units are not time units or if there are no bounds. + """ + return self._climatological if self.has_bounds() else False + + @climatological.setter + def climatological(self, value): + # Ensure the bounds are a compatible shape. 
+ value = bool(value) + if value: + if not self.units.is_time_reference(): + emsg = ("Cannot set climatological coordinate, does not have" + " valid time reference units, got {!r}.") + raise TypeError(emsg.format(self.units)) + + if not self.has_bounds(): + emsg = "Cannot set climatological coordinate, no bounds exist." + raise ValueError(emsg) + + self._climatological = value def lazy_points(self): """ @@ -758,6 +807,9 @@ def _repr_other_metadata(self): fmt += ', attributes={self.attributes}' if self.coord_system: fmt += ', coord_system={self.coord_system}' + if self.climatological: + fmt += ', climatological={' \ + 'self.climatological}' result = fmt.format(self=self) return result @@ -837,7 +889,8 @@ def __ne__(self, other): def _as_defn(self): defn = CoordDefn(self.standard_name, self.long_name, self.var_name, - self.units, self.attributes, self.coord_system) + self.units, self.attributes, self.coord_system, + self.climatological) return defn # Must supply __hash__ as Python 3 does not enable it if __eq__ is defined. @@ -1356,16 +1409,22 @@ def serialize(x): 'Metadata may not be fully descriptive for {!r}.' warnings.warn(msg.format(self.name())) - # Determine the array library for stacking - al = da if self.has_bounds() \ - and _lazy.is_lazy_data(self.core_bounds()) else np + if self.has_bounds(): + item = self.core_bounds() + if dims_to_collapse is not None: + # Express main dims_to_collapse as non-negative integers + # and add the last (bounds specific) dimension. 
+ dims_to_collapse = tuple( + dim % self.ndim for dim in dims_to_collapse) + (-1,) + else: + item = self.core_points() - item = al.concatenate(self.core_bounds()) if self.has_bounds() \ - else self.core_points() + # Determine the array library for stacking + al = da if _lazy.is_lazy_data(item) else np # Calculate the bounds and points along the right dims bounds = al.stack([item.min(axis=dims_to_collapse), - item.max(axis=dims_to_collapse)]).T + item.max(axis=dims_to_collapse)], axis=-1) points = al.array(bounds.sum(axis=-1) * 0.5, dtype=self.dtype) # Create the new collapsed coordinate. @@ -1656,6 +1715,8 @@ def xml_element(self, doc): if self.var_name: element.setAttribute('var_name', str(self.var_name)) element.setAttribute('units', repr(self.units)) + if self.climatological: + element.setAttribute('climatological', str(self.climatological)) if self.attributes: attributes_element = doc.createElement('attributes') @@ -1784,18 +1845,21 @@ def from_regular(cls, zeroth, step, count, standard_name=None, coord_system=coord_system, circular=circular) def __init__(self, points, standard_name=None, long_name=None, - var_name=None, units='1', bounds=None, attributes=None, - coord_system=None, circular=False): + var_name=None, units='1', bounds=None, + attributes=None, coord_system=None, circular=False, + climatological=False): """ Create a 1D, numeric, and strictly monotonic :class:`Coord` with read-only points and bounds. """ - super(DimCoord, self).__init__(points, standard_name=standard_name, - long_name=long_name, var_name=var_name, - units=units, bounds=bounds, - attributes=attributes, - coord_system=coord_system) + super(DimCoord, self).__init__( + points, standard_name=standard_name, + long_name=long_name, var_name=var_name, + units=units, bounds=bounds, + attributes=attributes, + coord_system=coord_system, + climatological=climatological) #: Whether the coordinate wraps by ``coord.units.modulus``. 
self.circular = bool(circular) @@ -1869,30 +1933,40 @@ def _new_points_requirements(self, points): Confirm that a new set of coord points adheres to the requirements for :class:`~iris.coords.DimCoord` points, being: * points are scalar or 1D, - * points are numeric, and + * points are numeric, + * points are not masked, and * points are monotonic. """ if points.ndim not in (0, 1): - raise ValueError( - 'The points array must be scalar or 1-dimensional.') + emsg = 'The {!r} {} points array must be scalar or 1-dimensional.' + raise ValueError(emsg.format(self.name(), self.__class__.__name__)) if not np.issubdtype(points.dtype, np.number): - raise ValueError('The points array must be numeric.') + emsg = 'The {!r} {} points array must be numeric.' + raise ValueError(emsg.format(self.name(), self.__class__.__name__)) + if ma.is_masked(points): + emsg = 'A {!r} {} points array must not be masked.' + raise TypeError(emsg.format(self.name(), self.__class__.__name__)) if points.size > 1 and not iris.util.monotonic(points, strict=True): - raise ValueError('The points array must be strictly monotonic.') + emsg = 'The {!r} {} points array must be strictly monotonic.' + raise ValueError(emsg.format(self.name(), self.__class__.__name__)) - def _points_setter(self, points): + @Coord.points.setter + def points(self, points): # DimCoord always realises the points, to allow monotonicity checks. # Ensure it is an actual array, and also make our own copy so that we # can make it read-only. points = _lazy.as_concrete_data(points) - points = np.array(points) + # Make sure that we have an array (any type of array). + points = np.asanyarray(points) # Check validity requirements for dimension-coordinate points. self._new_points_requirements(points) + # Cast to a numpy array for masked arrays with no mask. + points = np.array(points) - # Invoke the generic points setter. - super(DimCoord, self)._points_setter(points) + # Call the parent points setter. 
+ super(DimCoord, self.__class__).points.fset(self, points) if self._points_dm is not None: # Re-fetch the core array, as the super call may replace it. @@ -1902,27 +1976,32 @@ def _points_setter(self, points): # Make the array read-only. points.flags.writeable = False - points = property(Coord._points_getter, _points_setter) - def _new_bounds_requirements(self, bounds): """ Confirm that a new set of coord bounds adheres to the requirements for :class:`~iris.coords.DimCoord` bounds, being: * bounds are compatible in shape with the points - * bounds are numeric, and + * bounds are numeric, + * bounds are not masked, and * bounds are monotonic in the first dimension. """ # Ensure the bounds are a compatible shape. if self.shape != bounds.shape[:-1] and \ not (self.shape == (1,) and bounds.ndim == 1): - raise ValueError( - "The shape of the bounds array should be " - "points.shape + (n_bounds,)") - # Checks for numeric and monotonic. + emsg = ('The shape of the {!r} {} bounds array should be ' + 'points.shape + (n_bounds)') + raise ValueError(emsg.format(self.name(), self.__class__.__name__)) + # Checks for numeric. if not np.issubdtype(bounds.dtype, np.number): - raise ValueError('The bounds array must be numeric.') - + emsg = 'The {!r} {} bounds array must be numeric.' + raise ValueError(emsg.format(self.name(), self.__class__.__name__)) + # Check not masked. + if ma.is_masked(bounds): + emsg = 'A {!r} {} bounds array must not be masked.' + raise TypeError(emsg.format(self.name(), self.__class__.__name__)) + + # Check bounds are monotonic. 
if bounds.ndim > 1: n_bounds = bounds.shape[-1] n_points = bounds.shape[0] @@ -1933,25 +2012,33 @@ def _new_bounds_requirements(self, bounds): monotonic, direction = iris.util.monotonic( bounds[:, b_index], strict=True, return_direction=True) if not monotonic: - raise ValueError('The bounds array must be strictly ' - 'monotonic.') + emsg = ('The {!r} {} bounds array must be strictly ' + 'monotonic.') + raise ValueError(emsg.format(self.name(), + self.__class__.__name__)) directions.add(direction) if len(directions) != 1: - raise ValueError('The direction of monotonicity must be ' - 'consistent across all bounds') + emsg = ('The direction of monotonicity for {!r} {} must ' + 'be consistent across all bounds.') + raise ValueError(emsg.format(self.name(), + self.__class__.__name__)) - def _bounds_setter(self, bounds): + @Coord.bounds.setter + def bounds(self, bounds): if bounds is not None: # Ensure we have a realised array of new bounds values. bounds = _lazy.as_concrete_data(bounds) - bounds = np.array(bounds) + # Make sure we have an array (any type of array). + bounds = np.asanyarray(bounds) # Check validity requirements for dimension-coordinate bounds. self._new_bounds_requirements(bounds) + # Cast to a numpy array for masked arrays with no mask. + bounds = np.array(bounds) - # Invoke the generic bounds setter. - super(DimCoord, self)._bounds_setter(bounds) + # Call the parent bounds setter. + super(DimCoord, self.__class__).bounds.fset(self, bounds) if self._bounds_dm is not None: # Re-fetch the core array, as the super call may replace it. @@ -1961,8 +2048,6 @@ def _bounds_setter(self, bounds): # Ensure the array is read-only. bounds.flags.writeable = False - bounds = property(Coord._bounds_getter, _bounds_setter) - def is_monotonic(self): return True @@ -2016,13 +2101,13 @@ def __init__(self, data, standard_name=None, long_name=None, Kwargs: * standard_name: - CF standard name of the coordinate. + CF standard name of the cell measure. 
* long_name: - Descriptive name of the coordinate. + Descriptive name of the cell measure. * var_name: - The netCDF variable name for the coordinate. + The netCDF variable name for the cell measure. * units - The :class:`~cf_units.Unit` of the coordinate's values. + The :class:`~cf_units.Unit` of the cell measure's values. Can be a string, which will be converted to a Unit object. * attributes A dictionary containing other CF and user-defined attributes. @@ -2031,16 +2116,16 @@ def __init__(self, data, standard_name=None, long_name=None, are the only valid entries. """ - #: CF standard name of the quantity that the coordinate represents. + #: CF standard name of the quantity that the cell measure represents. self.standard_name = standard_name - #: Descriptive name of the coordinate. + #: Descriptive name of the cell measure. self.long_name = long_name - #: The netCDF variable name for the coordinate. + #: The netCDF variable name for the cell measure. self.var_name = var_name - #: Unit of the quantity that the coordinate represents. + #: Unit of the quantity that the cell measure represents. self.units = units #: Other attributes, including user specified attributes that @@ -2070,11 +2155,6 @@ def data(self, data): if data is None: raise ValueError('The data payload of a CellMeasure may not be ' 'None; it must be a numpy array or equivalent.') - if _lazy.is_lazy_data(data) and data.dtype.kind in 'biu': - # Non-floating cell measures are not valid up to CF v1.7 - msg = ('Cannot create cell measure with lazy data of type {}, as ' - 'integer types are not currently supported.') - raise ValueError(msg.format(data.dtype)) if data.shape == (): # If we have a scalar value, promote the shape from () to (1,). # NOTE: this way also *realises* it. Don't think that matters. 
@@ -2237,16 +2317,18 @@ def __init__(self, method, coords=None, intervals=None, comments=None): raise TypeError("'method' must be a string - got a '%s'" % type(method)) + default_name = CFVariableMixin._DEFAULT_NAME _coords = [] if coords is None: pass elif isinstance(coords, Coord): - _coords.append(coords.name()) + _coords.append(coords.name(token=True)) elif isinstance(coords, six.string_types): - _coords.append(coords) + _coords.append(CFVariableMixin.token(coords) or default_name) else: - normalise = (lambda coord: coord.name() if - isinstance(coord, Coord) else coord) + normalise = (lambda coord: coord.name(token=True) if + isinstance(coord, Coord) else + CFVariableMixin.token(coord) or default_name) _coords.extend([normalise(coord) for coord in coords]) _intervals = [] @@ -2315,7 +2397,7 @@ def xml_element(self, doc): # See Coord.cells() for the description/context. -class _CellIterator(collections.Iterator): +class _CellIterator(Iterator): def __init__(self, coord): self._coord = coord if coord.ndim != 1: @@ -2331,7 +2413,7 @@ def __next__(self): # See ExplicitCoord._group() for the description/context. -class _GroupIterator(collections.Iterator): +class _GroupIterator(Iterator): def __init__(self, points): self._points = points self._start = 0 diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 81152a1293..af405ebe20 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -24,11 +24,23 @@ from six.moves import (filter, input, map, range, zip) # noqa import six -import collections +from collections import namedtuple, OrderedDict +try: # Python 3 + from collections.abc import (Iterable, + Container, + Mapping, + MutableMapping, + Iterator) +except ImportError: # Python 2.7 + from collections import (Iterable, + Container, + Mapping, + MutableMapping, + Iterator) import copy from copy import deepcopy import datetime -from functools import reduce +from functools import reduce, partial import operator import warnings from xml.dom.minidom import Document @@ -58,13 +70,13 @@ __all__ = ['Cube', 'CubeList', 'CubeMetadata'] -class CubeMetadata(collections.namedtuple('CubeMetadata', - ['standard_name', - 'long_name', - 'var_name', - 'units', - 'attributes', - 'cell_methods'])): +class CubeMetadata(namedtuple('CubeMetadata', + ['standard_name', + 'long_name', + 'var_name', + 'units', + 'attributes', + 'cell_methods'])): """ Represents the phenomenon metadata for a single :class:`Cube`. @@ -219,6 +231,11 @@ def __repr__(self): """Runs repr on every cube.""" return '[%s]' % ',\n'.join([repr(cube) for cube in self]) + def _repr_html_(self): + from iris.experimental.representation import CubeListRepresentation + representer = CubeListRepresentation(self) + return representer.repr_html() + # TODO #370 Which operators need overloads? 
def __add__(self, other): return CubeList(list.__add__(self, other)) @@ -495,7 +512,7 @@ def concatenate_cube(self, check_aux_coords=True): raise ValueError("can't concatenate an empty CubeList") names = [cube.metadata.name() for cube in self] - unique_names = list(collections.OrderedDict.fromkeys(names)) + unique_names = list(OrderedDict.fromkeys(names)) if len(unique_names) == 1: res = iris._concatenate.concatenate( self, error_on_mismatch=True, @@ -629,7 +646,7 @@ def _is_single_item(testee): """ return (isinstance(testee, six.string_types) or - not isinstance(testee, collections.Iterable)) + not isinstance(testee, Iterable)) class Cube(CFVariableMixin): @@ -952,7 +969,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims): # Convert to a tuple of integers if data_dims is None: data_dims = tuple() - elif isinstance(data_dims, collections.Container): + elif isinstance(data_dims, Container): data_dims = tuple(int(d) for d in data_dims) else: data_dims = (int(data_dims),) @@ -1064,7 +1081,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): raise ValueError('The dim_coord may not be an AuxCoord instance.') # Convert data_dim to a single integer - if isinstance(data_dim, collections.Container): + if isinstance(data_dim, Container): if len(data_dim) != 1: raise ValueError('The supplied data dimension must be a' ' single number.') @@ -1123,13 +1140,30 @@ def remove_cell_measure(self, cell_measure): Args: - * cell_measure (CellMeasure) - The CellMeasure to remove from the cube. + * cell_measure (string or cell_measure) + The (name of the) cell measure to remove from the cube. As either - See also - :meth:`Cube.add_cell_measure()` + (a) a :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name`. Defaults to value of `default` + (which itself defaults to `unknown`) as defined in + :class:`iris._cube_coord_common.CFVariableMixin`. + + (b) a cell_measure instance with metadata equal to that of + the desired cell_measures. + + .. 
note:: + + If the argument given does not represent a valid cell_measure on + the cube, an :class:`iris.exceptions.CellMeasureNotFoundError` + is raised. + + .. seealso:: + + :meth:`Cube.add_cell_measure()` """ + cell_measure = self.cell_measure(cell_measure) + self._cell_measures_and_dims = [[cell_measure_, dim] for cell_measure_, dim in self._cell_measures_and_dims if cell_measure_ is not cell_measure] @@ -1366,7 +1400,7 @@ def coords(self, name_or_coord=None, standard_name=None, if guess_axis(coord_) == axis] if attributes is not None: - if not isinstance(attributes, collections.Mapping): + if not isinstance(attributes, Mapping): msg = 'The attributes keyword was expecting a dictionary ' \ 'type, but got a %s instead.' % type(attributes) raise ValueError(msg) @@ -1396,7 +1430,7 @@ def attr_filter(coord_): self.coord_dims(coord_)] if dimensions is not None: - if not isinstance(dimensions, collections.Container): + if not isinstance(dimensions, Container): dimensions = [dimensions] dimensions = tuple(dimensions) coords_and_factories = [coord_ for coord_ in coords_and_factories @@ -2790,17 +2824,22 @@ def transpose(self, new_order=None): dim_mapping = {src: dest for dest, src in enumerate(new_order)} - def remap_dim_coord(coord_and_dim): - coord, dim = coord_and_dim - return coord, dim_mapping[dim] - self._dim_coords_and_dims = list(map(remap_dim_coord, - self._dim_coords_and_dims)) + # Remap all cube dimensional metadata (dim and aux coords and cell + # measures). 
+ def remap_cube_metadata(metadata_and_dims): + metadata, dims = metadata_and_dims + if isinstance(dims, Iterable): + dims = tuple(dim_mapping[dim] for dim in dims) + else: + dims = dim_mapping[dims] + return metadata, dims - def remap_aux_coord(coord_and_dims): - coord, dims = coord_and_dims - return coord, tuple(dim_mapping[dim] for dim in dims) - self._aux_coords_and_dims = list(map(remap_aux_coord, + self._dim_coords_and_dims = list(map(remap_cube_metadata, + self._dim_coords_and_dims)) + self._aux_coords_and_dims = list(map(remap_cube_metadata, self._aux_coords_and_dims)) + self._cell_measures_and_dims = list(map(remap_cube_metadata, + self._cell_measures_and_dims)) def xml(self, checksum=False, order=True, byteorder=True): """ @@ -3018,11 +3057,10 @@ def __eq__(self, other): result = not (coord_comparison['not_equal'] or coord_comparison['non_equal_data_dimension']) - # having checked everything else, check approximate data - # equality - loading the data if has not already been loaded. + # Having checked everything else, check approximate data equality. if result: - result = np.all(np.abs(self.data - other.data) < 1e-8) - + result = da.allclose(self.core_data(), + other.core_data()).compute() return result # Must supply __ne__, Python does not defer to __eq__ for negative equality @@ -3185,7 +3223,7 @@ def collapsed(self, coords, aggregator, **kwargs): for coord in coords] # Remove duplicate dimensions. - new_dims = collections.OrderedDict.fromkeys( + new_dims = OrderedDict.fromkeys( d for dim in dims_to_collapse for d in dim) # Reverse the dimensions so the order can be maintained when # reshaping the data. @@ -3316,10 +3354,6 @@ def aggregated_by(self, coords, aggregator, **kwargs): Returns: :class:`iris.cube.Cube`. - .. note:: - - This operation does not yet have support for lazy evaluation. 
- For example: >>> import iris @@ -3413,29 +3447,46 @@ def aggregated_by(self, coords, aggregator, **kwargs): data_shape[dimension_to_groupby] = len(groupby) # Aggregate the group-by data. - cube_slice = [slice(None, None)] * len(data_shape) - - for i, groupby_slice in enumerate(groupby.group()): - # Slice the cube with the group-by slice to create a group-by - # sub-cube. - cube_slice[dimension_to_groupby] = groupby_slice - groupby_sub_cube = self[tuple(cube_slice)] - # Perform the aggregation over the group-by sub-cube and - # repatriate the aggregated data into the aggregate-by cube data. - cube_slice[dimension_to_groupby] = i - result = aggregator.aggregate(groupby_sub_cube.data, - axis=dimension_to_groupby, - **kwargs) - - # Determine aggregation result data type for the aggregate-by cube - # data on first pass. - if i == 0: - if ma.isMaskedArray(self.data): - aggregateby_data = ma.zeros(data_shape, dtype=result.dtype) - else: - aggregateby_data = np.zeros(data_shape, dtype=result.dtype) - - aggregateby_data[tuple(cube_slice)] = result + if (aggregator.lazy_func is not None and self.has_lazy_data()): + front_slice = (slice(None, None),) * dimension_to_groupby + back_slice = (slice(None, None),) * (len(data_shape) - + dimension_to_groupby - + 1) + groupby_subcubes = map( + lambda groupby_slice: + self[front_slice + (groupby_slice,) + back_slice].lazy_data(), + groupby.group() + ) + agg = partial(aggregator.lazy_aggregate, + axis=dimension_to_groupby, + **kwargs) + result = list(map(agg, groupby_subcubes)) + aggregateby_data = da.stack(result, axis=dimension_to_groupby) + else: + cube_slice = [slice(None, None)] * len(data_shape) + for i, groupby_slice in enumerate(groupby.group()): + # Slice the cube with the group-by slice to create a group-by + # sub-cube. 
+ cube_slice[dimension_to_groupby] = groupby_slice + groupby_sub_cube = self[tuple(cube_slice)] + # Perform the aggregation over the group-by sub-cube and + # repatriate the aggregated data into the aggregate-by + # cube data. + cube_slice[dimension_to_groupby] = i + result = aggregator.aggregate(groupby_sub_cube.data, + axis=dimension_to_groupby, + **kwargs) + + # Determine aggregation result data type for the aggregate-by + # cube data on first pass. + if i == 0: + if ma.isMaskedArray(self.data): + aggregateby_data = ma.zeros(data_shape, + dtype=result.dtype) + else: + aggregateby_data = np.zeros(data_shape, + dtype=result.dtype) + aggregateby_data[tuple(cube_slice)] = result # Add the aggregation meta data to the aggregate-by cube. aggregator.update_metadata(aggregateby_cube, @@ -3753,7 +3804,7 @@ def regrid(self, grid, scheme): return regridder(self) -class ClassDict(collections.MutableMapping, object): +class ClassDict(MutableMapping, object): """ A mapping that stores objects keyed on their superclasses and their names. @@ -3839,7 +3890,7 @@ def sorted_axes(axes): # See Cube.slice() for the definition/context. -class _SliceIterator(collections.Iterator): +class _SliceIterator(Iterator): def __init__(self, cube, dims_index, requested_dims, ordered): self._cube = cube diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 91e48af61a..edcff3d3c4 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2018, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. 
# @@ -32,16 +32,19 @@ import numpy as np import numpy.ma as ma import scipy.interpolate -from scipy.sparse import csc_matrix, diags as sparse_diags import six import iris.analysis.cartography from iris.analysis._interpolation import (get_xy_dim_coords, get_xy_coords, snapshot_grid) -from iris.analysis._regrid import RectilinearRegridder +from iris.analysis._regrid import ( + RectilinearRegridder, + _regrid_weighted_curvilinear_to_rectilinear__prepare, + _regrid_weighted_curvilinear_to_rectilinear__perform +) import iris.coord_systems import iris.cube -from iris.util import _meshgrid, promote_aux_coord_to_dim_coord +from iris.util import _meshgrid _Version = namedtuple('Version', ('major', 'minor', 'micro')) @@ -780,27 +783,6 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, return new_cube -def _transform_xy_arrays(crs_from, x, y, crs_to): - """ - Transform 2d points between cartopy coordinate reference systems. - - NOTE: copied private function from iris.analysis.cartography. - - Args: - - * crs_from, crs_to (:class:`cartopy.crs.Projection`): - The coordinate reference systems. - * x, y (arrays): - point locations defined in 'crs_from'. - - Returns: - x, y : Arrays of locations defined in 'crs_to'. - - """ - pts = crs_to.transform_points(crs_from, x, y) - return pts[..., 0], pts[..., 1] - - def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): """ Return a new cube with the data values calculated using the weighted @@ -856,428 +838,17 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): return result -def _regrid_weighted_curvilinear_to_rectilinear__prepare( - src_cube, weights, grid_cube): - """ - First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'. - - Check inputs and calculate the sparse regrid matrix and related info. - The 'regrid info' returned can be re-used over many 2d slices. 
- - """ - if src_cube.aux_factories: - msg = 'All source cube derived coordinates will be ignored.' - warnings.warn(msg) - - # Get the source cube x and y 2D auxiliary coordinates. - sx, sy = src_cube.coord(axis='x'), src_cube.coord(axis='y') - # Get the target grid cube x and y dimension coordinates. - tx, ty = get_xy_dim_coords(grid_cube) - - if sx.units != sy.units: - msg = 'The source cube x ({!r}) and y ({!r}) coordinates must ' \ - 'have the same units.' - raise ValueError(msg.format(sx.name(), sy.name())) - - if src_cube.coord_dims(sx) != src_cube.coord_dims(sy): - msg = 'The source cube x ({!r}) and y ({!r}) coordinates must ' \ - 'map onto the same cube dimensions.' - raise ValueError(msg.format(sx.name(), sy.name())) - - if sx.coord_system != sy.coord_system: - msg = 'The source cube x ({!r}) and y ({!r}) coordinates must ' \ - 'have the same coordinate system.' - raise ValueError(msg.format(sx.name(), sy.name())) - - if sx.coord_system is None: - msg = ('The source X and Y coordinates must have a defined ' - 'coordinate system.') - raise ValueError(msg) - - if tx.units != ty.units: - msg = 'The target grid cube x ({!r}) and y ({!r}) coordinates must ' \ - 'have the same units.' - raise ValueError(msg.format(tx.name(), ty.name())) - - if tx.coord_system is None: - msg = ('The target X and Y coordinates must have a defined ' - 'coordinate system.') - raise ValueError(msg) - - if tx.coord_system != ty.coord_system: - msg = 'The target grid cube x ({!r}) and y ({!r}) coordinates must ' \ - 'have the same coordinate system.' - raise ValueError(msg.format(tx.name(), ty.name())) - - if weights is None: - weights = np.ones(sx.shape) - if weights.shape != sx.shape: - msg = ('Provided weights must have the same shape as the X and Y ' - 'coordinates.') - raise ValueError(msg) - - if not tx.has_bounds() or not tx.is_contiguous(): - msg = 'The target grid cube x ({!r})coordinate requires ' \ - 'contiguous bounds.' 
- raise ValueError(msg.format(tx.name())) - - if not ty.has_bounds() or not ty.is_contiguous(): - msg = 'The target grid cube y ({!r}) coordinate requires ' \ - 'contiguous bounds.' - raise ValueError(msg.format(ty.name())) - - def _src_align_and_flatten(coord): - # Return a flattened, unmasked copy of a coordinate's points array that - # will align with a flattened version of the source cube's data. - # - # PP-TODO: Should work with any cube dimensions for X and Y coords. - # Probably needs fixing anyway? - # - points = coord.points - if src_cube.coord_dims(coord) == (1, 0): - points = points.T - if points.shape != src_cube.shape: - msg = 'The shape of the points array of {!r} is not compatible ' \ - 'with the shape of {!r}.' - raise ValueError(msg.format(coord.name(), src_cube.name())) - return np.asarray(points.flatten()) - - # Align and flatten the coordinate points of the source space. - sx_points = _src_align_and_flatten(sx) - sy_points = _src_align_and_flatten(sy) - - # Transform source X and Y points into the target coord-system, if needed. - if sx.coord_system != tx.coord_system: - src_crs = sx.coord_system.as_cartopy_projection() - tgt_crs = tx.coord_system.as_cartopy_projection() - sx_points, sy_points = _transform_xy_arrays( - src_crs, sx_points, sy_points, tgt_crs) - # - # TODO: how does this work with scaled units ?? - # e.g. if crs is latlon, units could be degrees OR radians ? - # - - # Wrap modular values (e.g. longitudes) if required. - modulus = sx.units.modulus - if modulus is not None: - # Match the source cube x coordinate range to the target grid - # cube x coordinate range. 
- min_sx, min_tx = np.min(sx.points), np.min(tx.points) - if min_sx < 0 and min_tx >= 0: - indices = np.where(sx_points < 0) - # Ensure += doesn't raise a TypeError - if not np.can_cast(modulus, sx_points.dtype): - sx_points = sx_points.astype(type(modulus), casting='safe') - sx_points[indices] += modulus - elif min_sx >= 0 and min_tx < 0: - indices = np.where(sx_points > (modulus / 2)) - # Ensure -= doesn't raise a TypeError - if not np.can_cast(modulus, sx_points.dtype): - sx_points = sx_points.astype(type(modulus), casting='safe') - sx_points[indices] -= modulus - - # Create target grid cube x and y cell boundaries. - tx_depth, ty_depth = tx.points.size, ty.points.size - tx_dim, = grid_cube.coord_dims(tx) - ty_dim, = grid_cube.coord_dims(ty) - - tx_cells = np.concatenate((tx.bounds[:, 0], - tx.bounds[-1, 1].reshape(1))) - ty_cells = np.concatenate((ty.bounds[:, 0], - ty.bounds[-1, 1].reshape(1))) - - # Determine the target grid cube x and y cells that bound - # the source cube x and y points. - - def _regrid_indices(cells, depth, points): - # Calculate the minimum difference in cell extent. - extent = np.min(np.diff(cells)) - if extent == 0: - # Detected an dimension coordinate with an invalid - # zero length cell extent. - msg = 'The target grid cube {} ({!r}) coordinate contains ' \ - 'a zero length cell extent.' - axis, name = 'x', tx.name() - if points is sy_points: - axis, name = 'y', ty.name() - raise ValueError(msg.format(axis, name)) - elif extent > 0: - # The cells of the dimension coordinate are in ascending order. - indices = np.searchsorted(cells, points, side='right') - 1 - else: - # The cells of the dimension coordinate are in descending order. - # np.searchsorted() requires ascending order, so we require to - # account for this restriction. 
- cells = cells[::-1] - right = np.searchsorted(cells, points, side='right') - left = np.searchsorted(cells, points, side='left') - indices = depth - right - # Only those points that exactly match the left-hand cell bound - # will differ between 'left' and 'right'. Thus their appropriate - # target cell location requires to be recalculated to give the - # correct descending [upper, lower) interval cell, source to target - # regrid behaviour. - delta = np.where(left != right)[0] - if delta.size: - indices[delta] = depth - left[delta] - return indices - - x_indices = _regrid_indices(tx_cells, tx_depth, sx_points) - y_indices = _regrid_indices(ty_cells, ty_depth, sy_points) - - # Now construct a sparse M x N matix, where M is the flattened target - # space, and N is the flattened source space. The sparse matrix will then - # be populated with those source cube points that contribute to a specific - # target cube cell. - - # Determine the valid indices and their offsets in M x N space. - # Calculate the valid M offsets. - cols = np.where((y_indices >= 0) & (y_indices < ty_depth) & - (x_indices >= 0) & (x_indices < tx_depth))[0] - - # Reduce the indices to only those that are valid. - x_indices = x_indices[cols] - y_indices = y_indices[cols] - - # Calculate the valid N offsets. - if ty_dim < tx_dim: - rows = y_indices * tx.points.size + x_indices - else: - rows = x_indices * ty.points.size + y_indices - - # Calculate the associated valid weights. - weights_flat = weights.flatten() - data = weights_flat[cols] - - # Build our sparse M x N matrix of weights. - sparse_matrix = csc_matrix((data, (rows, cols)), - shape=(grid_cube.data.size, src_cube.data.size)) - - # Performing a sparse sum to collapse the matrix to (M, 1). - sum_weights = sparse_matrix.sum(axis=1).getA() - - # Determine the rows (flattened target indices) that have a - # contribution from one or more source points. 
- rows = np.nonzero(sum_weights) - - # NOTE: when source points are masked, this 'sum_weights' is possibly - # incorrect and needs re-calculating. Likewise 'rows' may cover target - # cells which happen to get no data. This is dealt with by adjusting as - # required in the '__perform' function, below. - - regrid_info = (sparse_matrix, sum_weights, rows, grid_cube) - return regrid_info - - -def _regrid_weighted_curvilinear_to_rectilinear__perform( - src_cube, regrid_info): - """ - Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. - - Perform the prepared regrid calculation on a single 2d cube. - - """ - sparse_matrix, sum_weights, rows, grid_cube = regrid_info - - # Calculate the numerator of the weighted mean (M, 1). - is_masked = ma.isMaskedArray(src_cube.data) - if not is_masked: - data = src_cube.data - else: - # Use raw data array - data = src_cube.data.data - # Check if there are any masked source points to take account of. - is_masked = np.ma.is_masked(src_cube.data) - if is_masked: - # Zero any masked source points so they add nothing in output sums. - mask = src_cube.data.mask - data[mask] = 0.0 - # Calculate a new 'sum_weights' to allow for missing source points. - # N.B. it is more efficient to use the original once-calculated - # sparse matrix, but in this case we can't. - # Hopefully, this post-multiplying by the validities is less costly - # than repeating the whole sparse calculation. - valid_src_cells = ~mask.flat[:] - src_cell_validity_factors = sparse_diags( - np.array(valid_src_cells, dtype=int), - 0) - valid_weights = sparse_matrix * src_cell_validity_factors - sum_weights = valid_weights.sum(axis=1).getA() - # Work out where output cells are missing all contributions. - # This allows for where 'rows' contains output cells that have no - # data because of missing input points. - zero_sums = sum_weights == 0.0 - # Make sure we can still divide by sum_weights[rows]. 
- sum_weights[zero_sums] = 1.0 - - # Calculate sum in each target cell, over contributions from each source - # cell. - numerator = sparse_matrix * data.reshape(-1, 1) - - # Create a template for the weighted mean result. - weighted_mean = ma.masked_all(numerator.shape, dtype=numerator.dtype) - - # Calculate final results in all relevant places. - weighted_mean[rows] = numerator[rows] / sum_weights[rows] - if is_masked: - # Ensure masked points where relevant source cells were all missing. - if np.any(zero_sums): - # Make masked if it wasn't. - weighted_mean = np.ma.asarray(weighted_mean) - # Mask where contributing sums were zero. - weighted_mean[zero_sums] = np.ma.masked - - # Construct the final regridded weighted mean cube. - tx = grid_cube.coord(axis='x', dim_coords=True) - ty = grid_cube.coord(axis='y', dim_coords=True) - tx_dim, = grid_cube.coord_dims(tx) - ty_dim, = grid_cube.coord_dims(ty) - dim_coords_and_dims = list(zip((ty.copy(), tx.copy()), (ty_dim, tx_dim))) - cube = iris.cube.Cube(weighted_mean.reshape(grid_cube.shape), - dim_coords_and_dims=dim_coords_and_dims) - cube.metadata = copy.deepcopy(src_cube.metadata) - - for coord in src_cube.coords(dimensions=()): - cube.add_aux_coord(coord.copy()) - - return cube - - -class _CurvilinearRegridder(object): - """ - This class provides support for performing point-in-cell regridding - between a curvilinear source grid and a rectilinear target grid. - - """ - def __init__(self, src_grid_cube, target_grid_cube, weights=None): - """ - Create a regridder for conversions between the source - and target grids. - - Args: - - * src_grid_cube: - The :class:`~iris.cube.Cube` providing the source grid. - * tgt_grid_cube: - The :class:`~iris.cube.Cube` providing the target grid. - - Optional Args: - - * weights: - A :class:`numpy.ndarray` instance that defines the weights - for the grid cells of the source grid. Must have the same shape - as the data of the source grid. - If unspecified, equal weighting is assumed. 
- - """ - # Validity checks. - if not isinstance(src_grid_cube, iris.cube.Cube): - raise TypeError("'src_grid_cube' must be a Cube") - if not isinstance(target_grid_cube, iris.cube.Cube): - raise TypeError("'target_grid_cube' must be a Cube") - # Snapshot the state of the cubes to ensure that the regridder - # is impervious to external changes to the original source cubes. - self._src_cube = src_grid_cube.copy() - self._target_cube = target_grid_cube.copy() - self.weights = weights - self._regrid_info = None - - @staticmethod - def _get_horizontal_coord(cube, axis): - """ - Gets the horizontal coordinate on the supplied cube along the - specified axis. - - Args: - - * cube: - An instance of :class:`iris.cube.Cube`. - * axis: - Locate coordinates on `cube` along this axis. - - Returns: - The horizontal coordinate on the specified axis of the supplied - cube. - - """ - coords = cube.coords(axis=axis, dim_coords=False) - if len(coords) != 1: - raise ValueError('Cube {!r} must contain a single 1D {} ' - 'coordinate.'.format(cube.name()), axis) - return coords[0] - - def __call__(self, src): - """ - Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of - this :class:`_CurvilinearRegridder`. - - The given cube must be defined with the same grid as the source - grid used to create this :class:`_CurvilinearRegridder`. - - Args: - - * src: - A :class:`~iris.cube.Cube` to be regridded. - - Returns: - A cube defined with the horizontal dimensions of the target - and the other dimensions from this cube. The data values of - this cube will be converted to values on the new grid using - point-in-cell regridding. - - """ - # Validity checks. 
- if not isinstance(src, iris.cube.Cube): - raise TypeError("'src' must be a Cube") - - gx = self._get_horizontal_coord(self._src_cube, 'x') - gy = self._get_horizontal_coord(self._src_cube, 'y') - src_grid = (gx.copy(), gy.copy()) - sx = self._get_horizontal_coord(src, 'x') - sy = self._get_horizontal_coord(src, 'y') - if (sx, sy) != src_grid: - raise ValueError('The given cube is not defined on the same ' - 'source grid as this regridder.') - - # Call the regridder function. - # This includes repeating over any non-XY dimensions, because the - # underlying routine does not support this. - # FOR NOW: we will use cube.slices and merge to achieve this, - # though that is not a terribly efficient method ... - # TODO: create a template result cube and paste data slices into it, - # which would be more efficient. - result_slices = iris.cube.CubeList([]) - for slice_cube in src.slices(sx): - if self._regrid_info is None: - # Calculate the basic regrid info just once. - self._regrid_info = \ - _regrid_weighted_curvilinear_to_rectilinear__prepare( - slice_cube, self.weights, self._target_cube) - slice_result = \ - _regrid_weighted_curvilinear_to_rectilinear__perform( - slice_cube, self._regrid_info) - result_slices.append(slice_result) - result = result_slices.merge_cube() - return result - - class PointInCell(object): """ This class describes the point-in-cell regridding scheme for use typically with :meth:`iris.cube.Cube.regrid()`. - The PointInCell regridder can regrid data from a source grid of any - dimensionality and in any coordinate system. - The location of each source point is specified by X and Y coordinates - mapped over the same cube dimensions, aka "grid dimensions" : the grid may - have any dimensionality. The X and Y coordinates must also have the same, - defined coord_system. - The weights, if specified, must have the same shape as the X and Y - coordinates. 
- The output grid can be any 'normal' XY grid, specified by *separate* X - and Y coordinates : That is, X and Y have two different cube dimensions. - The output X and Y coordinates must also have a common, specified - coord_system. + .. warning:: + + This class is now **disabled**. + + The functionality has been moved to + :class:`iris.analysis.PointInCell`. """ def __init__(self, weights=None): @@ -1285,44 +856,20 @@ def __init__(self, weights=None): Point-in-cell regridding scheme suitable for regridding over one or more orthogonal coordinates. - Optional Args: - - * weights: - A :class:`numpy.ndarray` instance that defines the weights - for the grid cells of the source grid. Must have the same shape - as the data of the source grid. - If unspecified, equal weighting is assumed. - - """ - self.weights = weights - - def regridder(self, src_grid, target_grid): - """ - Creates a point-in-cell regridder to perform regridding from the - source grid to the target grid. - - Typically you should use :meth:`iris.cube.Cube.regrid` for - regridding a cube. There are, however, some situations when - constructing your own regridder is preferable. These are detailed in - the :ref:`user guide `. - - Args: + .. warning:: - * src_grid: - The :class:`~iris.cube.Cube` defining the source grid. - * target_grid: - The :class:`~iris.cube.Cube` defining the target grid. + This class is now **disabled**. - Returns: - A callable with the interface: - - `callable(cube)` - - where `cube` is a cube with the same grid as `src_grid` - that is to be regridded to the `target_grid`. + The functionality has been moved to + :class:`iris.analysis.PointInCell`. 
""" - return _CurvilinearRegridder(src_grid, target_grid, self.weights) + raise Exception( + 'The class "iris.experimental.PointInCell" has been ' + 'moved, and is now in iris.analysis' + '\nPlease replace ' + '"iris.experimental.PointInCell" with ' + '"iris.analysis.PointInCell".') class _ProjectedUnstructuredRegridder(object): diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 5adef1f06e..de0bfb229e 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2018, Met Office +# (C) British Crown Copyright 2018 - 2019, Met Office # # This file is part of Iris. # @@ -207,7 +207,7 @@ def _make_shapes_row(self): for shape in self.shapes: cells.append( '{}'.format(shape)) - cells.append('') + cells.append('') return '\n'.join(cell for cell in cells) def _make_row(self, title, body=None, col_span=0): @@ -258,6 +258,12 @@ def _make_row(self, title, body=None, col_span=0): row.append('') return row + def _expand_last_cell(self, element, body): + '''Expand an element containing a cell by adding a new line.''' + split_point = element.index('') + element = element[:split_point] + '
    ' + body + element[split_point:] + return element + def _make_content(self): elements = [] for k, v in self.str_headings.items(): @@ -271,9 +277,23 @@ def _make_content(self): title = body.pop(0) colspan = 0 else: - split_point = line.index(':') - title = line[:split_point].strip() - body = line[split_point + 2:].strip() + try: + split_point = line.index(':') + except ValueError: + # When a line exists in v without a ':', we expect + # that this is due to the value of some attribute + # containing multiple lines. We collect all these + # lines in the same cell. + body = line.strip() + # We choose the element containing the last cell + # in the last row. + element = elements[-2] + element = self._expand_last_cell(element, body) + elements[-2] = element + continue + else: + title = line[:split_point].strip() + body = line[split_point + 2:].strip() colspan = self.ndims elements.extend( self._make_row(title, body=body, col_span=colspan)) @@ -306,3 +326,89 @@ def repr_html(self): id=self.cube_id, shape=shape, content=content) + + +class CubeListRepresentation(object): + _template = """ + + +{contents} + """ + + _accordian_panel = """ + +

    +

    {content}

    +
    + """ + + def __init__(self, cubelist): + self.cubelist = cubelist + self.cubelist_id = id(self.cubelist) + + def make_content(self): + html = [] + for i, cube in enumerate(self.cubelist): + title = '{i}: {summary}'.format(i=i, + summary=cube.summary(shorten=True)) + content = cube._repr_html_() + html.append(self._accordian_panel.format(uid=self.cubelist_id, + title=title, + content=content)) + return html + + def repr_html(self): + contents = self.make_content() + contents_str = '\n'.join(contents) + return self._template.format(uid=self.cubelist_id, + contents=contents_str) diff --git a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb index 5fc68e58c2..19fb2a0337 100644 --- a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb +++ b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -195,6 +195,47 @@ fc_provides_grid_mapping_albers_equal_area facts_cf.provides(coordinate_system, albers_equal_area) python engine.rule_triggered.add(rule.name) +# +# Context: +# This rule will trigger iff a grid_mapping() case specific fact +# has been asserted that refers to a vertical perspective. +# +# Purpose: +# Creates the vertical perspective coordinate system. 
+# +fc_provides_grid_mapping_vertical_perspective + foreach + facts_cf.grid_mapping($grid_mapping) + check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_VERTICAL) + assert + python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] + python coordinate_system = \ + build_vertical_perspective_coordinate_system(engine, cf_grid_var) + python engine.provides['coordinate_system'] = coordinate_system + facts_cf.provides(coordinate_system, vertical_perspective) + python engine.rule_triggered.add(rule.name) + +# +# Context: +# This rule will trigger iff a grid_mapping() case specific fact +# has been asserted that refers to a geostationary. +# +# Purpose: +# Creates the geostationary coordinate system. +# +fc_provides_grid_mapping_geostationary + foreach + facts_cf.grid_mapping($grid_mapping) + check is_grid_mapping(engine, $grid_mapping, + CF_GRID_MAPPING_GEOSTATIONARY) + assert + python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] + python coordinate_system = \ + build_geostationary_coordinate_system(engine, cf_grid_var) + python engine.provides['coordinate_system'] = coordinate_system + facts_cf.provides(coordinate_system, geostationary) + python engine.rule_triggered.add(rule.name) + # # Context: @@ -832,6 +873,85 @@ fc_build_coordinate_projection_y_albers_equal_area coord_system=engine.provides['coordinate_system']) python engine.rule_triggered.add(rule.name) +# +# Context: +# This rule will trigger iff a projection_x_coordinate coordinate exists and +# a vertical perspective coordinate system exists. +# +# Purpose: +# Add the projection_x_coordinate coordinate into the cube. 
+# +fc_build_coordinate_projection_x_vertical_perspective + foreach + facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) + facts_cf.provides(coordinate_system, vertical_perspective) + assert + python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] + python build_dimension_coordinate(engine, cf_coord_var, + coord_name=CF_VALUE_STD_NAME_PROJ_X, + coord_system=engine.provides['coordinate_system']) + python engine.rule_triggered.add(rule.name) + + +# +# Context: +# This rule will trigger iff a projection_y_coordinate coordinate exists and +# a vertical perspective coordinate system exists. +# +# Purpose: +# Add the projection_y_coordinate coordinate into the cube. +# +fc_build_coordinate_projection_y_vertical_perspective + foreach + facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) + facts_cf.provides(coordinate_system, vertical_perspective) + assert + python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] + python build_dimension_coordinate(engine, cf_coord_var, + coord_name=CF_VALUE_STD_NAME_PROJ_Y, + coord_system=engine.provides['coordinate_system']) + python engine.rule_triggered.add(rule.name) + +# +# Context: +# This rule will trigger iff a projection_x_coordinate coordinate exists and +# a geostationary coordinate system exists. +# +# Purpose: +# Add the projection_x_coordinate coordinate into the cube. +# +fc_build_coordinate_projection_x_geostationary + foreach + facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) + facts_cf.provides(coordinate_system, geostationary) + assert + python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] + python build_dimension_coordinate(engine, cf_coord_var, + coord_name=CF_VALUE_STD_NAME_PROJ_X, + coord_system=engine.provides['coordinate_system']) + python engine.rule_triggered.add(rule.name) + + +# +# Context: +# This rule will trigger iff a projection_y_coordinate coordinate exists and +# a geostationary coordinate system exists. 
+# +# Purpose: +# Add the projection_y_coordinate coordinate into the cube. +# +fc_build_coordinate_projection_y_geostationary + foreach + facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) + facts_cf.provides(coordinate_system, geostationary) + assert + python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] + python build_dimension_coordinate(engine, cf_coord_var, + coord_name=CF_VALUE_STD_NAME_PROJ_Y, + coord_system=engine.provides['coordinate_system']) + python engine.rule_triggered.add(rule.name) + + # # Context: # This rule will trigger iff a CF time coordinate exists. @@ -1075,6 +1195,7 @@ fc_extras import iris.exceptions import iris.std_names import iris.util + from iris._cube_coord_common import get_valid_standard_name from iris._lazy_data import as_lazy_data @@ -1119,6 +1240,7 @@ fc_extras CF_GRID_MAPPING_STEREO = 'stereographic' CF_GRID_MAPPING_TRANSVERSE = 'transverse_mercator' CF_GRID_MAPPING_VERTICAL = 'vertical_perspective' + CF_GRID_MAPPING_GEOSTATIONARY = 'geostationary' # # CF Attribute Names. 
@@ -1144,6 +1266,8 @@ fc_extras CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = 'scale_factor_at_central_meridian' CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = 'longitude_of_central_meridian' CF_ATTR_GRID_STANDARD_PARALLEL = 'standard_parallel' + CF_ATTR_GRID_PERSPECTIVE_HEIGHT = 'perspective_point_height' + CF_ATTR_GRID_SWEEP_ANGLE_AXIS = 'sweep_angle_axis' CF_ATTR_POSITIVE = 'positive' CF_ATTR_STD_NAME = 'standard_name' CF_ATTR_LONG_NAME = 'long_name' @@ -1186,9 +1310,9 @@ fc_extras cube.long_name = long_name if standard_name is not None: - if standard_name in iris.std_names.STD_NAMES: - cube.standard_name = standard_name - else: + try: + cube.standard_name = get_valid_standard_name(standard_name) + except ValueError: if cube.long_name is not None: cube.attributes['invalid_standard_name'] = standard_name else: @@ -1477,6 +1601,73 @@ fc_extras return cs + ################################################################################ + def build_vertical_perspective_coordinate_system(engine, cf_grid_var): + """ + Create a vertical perspective coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None) + false_easting = getattr( + cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr( + cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if major is not None or minor is not None or \ + inverse_flattening is not None: + ellipsoid = iris.coord_systems.GeogCS(major, minor, + inverse_flattening) + + cs = iris.coord_systems.VerticalPerspective( + latitude_of_projection_origin, longitude_of_projection_origin, + perspective_point_height, false_easting, false_northing, ellipsoid) + + return cs + + ################################################################################ + def build_geostationary_coordinate_system(engine, cf_grid_var): + """ + Create a geostationary coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None) + false_easting = getattr( + cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr( + cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + sweep_angle_axis = getattr( + cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None) + + ellipsoid = None + if major is not None or minor is not None or \ + inverse_flattening is not None: + ellipsoid = iris.coord_systems.GeogCS(major, minor, + inverse_flattening) + + cs = iris.coord_systems.Geostationary( + latitude_of_projection_origin, longitude_of_projection_origin, + perspective_point_height, sweep_angle_axis, false_easting, + false_northing, ellipsoid) + + return cs + ################################################################################ def get_attr_units(cf_var, attributes): attr_units = getattr(cf_var, CF_ATTR_UNITS, cf_units._UNIT_DIMENSIONLESS) @@ -1523,7 +1714,9 @@ fc_extras cf_name = str(cf_coord_var.cf_name) if standard_name is not None: - if standard_name not in iris.std_names.STD_NAMES: + try: + standard_name = get_valid_standard_name(standard_name) + except ValueError: if long_name is not None: attributes['invalid_standard_name'] = standard_name if coord_name is not None: @@ -1536,7 +1729,7 @@ fc_extras standard_name = coord_name else: standard_name = None - long_name = standard_name + else: if coord_name is not None: standard_name = coord_name @@ -1564,6 +1757,7 @@ fc_extras # variable is missing, as that will already have been done by # iris.fileformats.cf. 
cf_bounds_var = None + climatological = False if attr_bounds is not None: bounds_vars = cf_coord_var.cf_group.bounds if attr_bounds in bounds_vars: @@ -1572,13 +1766,14 @@ fc_extras climatology_vars = cf_coord_var.cf_group.climatology if attr_climatology in climatology_vars: cf_bounds_var = climatology_vars[attr_climatology] + climatological = True if attr_bounds is not None and attr_climatology is not None: warnings.warn('Ignoring climatology in favour of bounds attribute ' 'on NetCDF variable {!r}.'.format( cf_coord_var.cf_name)) - return cf_bounds_var + return cf_bounds_var, climatological ################################################################################ @@ -1627,7 +1822,8 @@ fc_extras warnings.warn(msg.format(str(cf_coord_var.cf_name))) # Get any coordinate bounds. - cf_bounds_var = get_cf_bounds_var(cf_coord_var) + cf_bounds_var, climatological = get_cf_bounds_var( + cf_coord_var) if cf_bounds_var is not None: bounds_data = cf_bounds_var[:] # Gracefully fill bounds masked array. @@ -1673,7 +1869,9 @@ fc_extras bounds=bounds_data, attributes=attributes, coord_system=coord_system, - circular=circular) + circular=circular, + climatological= + climatological) except ValueError as e_msg: # Attempt graceful loading. coord = iris.coords.AuxCoord(points_data, @@ -1683,7 +1881,9 @@ fc_extras units=attr_units, bounds=bounds_data, attributes=attributes, - coord_system=coord_system) + coord_system=coord_system, + climatological= + climatological) cube.add_aux_coord(coord, data_dims) msg = 'Failed to create {name!r} dimension coordinate: {error}\n' \ 'Gracefully creating {name!r} auxiliary coordinate instead.' @@ -1719,7 +1919,8 @@ fc_extras points_data = _get_cf_var_data(cf_coord_var, engine.filename) # Get any coordinate bounds. 
- cf_bounds_var = get_cf_bounds_var(cf_coord_var) + cf_bounds_var, climatological = get_cf_bounds_var( + cf_coord_var) if cf_bounds_var is not None: bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename) @@ -1755,7 +1956,9 @@ fc_extras units=attr_units, bounds=bounds_data, attributes=attributes, - coord_system=coord_system) + coord_system=coord_system, + climatological= + climatological) # Add it to the cube cube.add_aux_coord(coord, data_dims) diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 061c65aa6a..17524717ff 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2012 - 2015, Met Office +# (C) British Crown Copyright 2012 - 2019, Met Office # # This file is part of Iris. # @@ -118,9 +118,9 @@ def to_cube(self): # Name. if self.format.lower() == "abf": - cube.rename("leaf_area_index") - elif self.format.lower() == "abl": cube.rename("FAPAR") + elif self.format.lower() == "abl": + cube.rename("leaf_area_index") else: msg = "Unknown ABF/ABL format: {}".format(self.format) raise iris.exceptions.TranslationError(msg) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index f2fc895d1c..8456925f58 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -30,7 +30,11 @@ import six from abc import ABCMeta, abstractmethod -from collections import Iterable, MutableMapping + +try: # Python 3 + from collections.abc import Iterable, MutableMapping +except ImportError: # Python 2.7 + from collections import Iterable, MutableMapping import os import re import warnings @@ -791,7 +795,8 @@ def identify(cls, variables, ignore=None, target=None, warn=True): measure = match_group['lhs'] variable_name = match_group['rhs'] - if variable_name not in ignore: + var_matches_nc = variable_name != nc_var_name + if variable_name not in ignore and var_matches_nc: if variable_name not in variables: if warn: message = 'Missing CF-netCDF measure variable %r, referenced by netCDF variable %r' diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 8c965ef1a5..e052b32174 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -100,7 +100,7 @@ # UKMO specific attributes that should not be global. _UKMO_DATA_ATTRS = ['STASH', 'um_stash_source', 'ukmo__process_flags'] -CF_CONVENTIONS_VERSION = 'CF-1.5' +CF_CONVENTIONS_VERSION = 'CF-1.7' _FactoryDefn = collections.namedtuple('_FactoryDefn', ('primary', 'std_name', 'formula_terms_format')) @@ -510,8 +510,10 @@ def _get_cf_var_data(cf_var, filename): netCDF4.default_fillvals[cf_var.dtype.str[1:]]) proxy = NetCDFDataProxy(cf_var.shape, dtype, filename, cf_var.cf_name, fill_value) + # Get the chunking specified for the variable : this is either a shape, or + # maybe the string "contiguous". chunks = cf_var.cf_data.chunking() - # Chunks can be an iterable, None, or `'contiguous'`. + # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. 
if chunks == 'contiguous': chunks = None return as_lazy_data(proxy, chunks=chunks) @@ -1448,13 +1450,21 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): else: bounds_dimension_name = 'bnds_%s' % n_bounds + if coord.climatological: + property_name = 'climatology' + varname_extra = 'climatology' + else: + property_name = 'bounds' + varname_extra = 'bnds' + if bounds_dimension_name not in self._dataset.dimensions: # Create the bounds dimension with the appropriate extent. self._dataset.createDimension(bounds_dimension_name, n_bounds) - _setncattr(cf_var, 'bounds', cf_name + '_bnds') + boundsvar_name = '{}_{}'.format(cf_name, varname_extra) + _setncattr(cf_var, property_name, boundsvar_name) cf_var_bounds = self._dataset.createVariable( - cf_var.bounds, bounds.dtype.newbyteorder('='), + boundsvar_name, bounds.dtype.newbyteorder('='), cf_var.dimensions + (bounds_dimension_name,)) cf_var_bounds[:] = bounds @@ -1888,6 +1898,35 @@ def add_ellipsoid(ellipsoid): cf_var_grid.false_northing = cs.false_northing cf_var_grid.standard_parallel = (cs.standard_parallels) + # vertical perspective + elif isinstance(cs, + iris.coord_systems.VerticalPerspective): + if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.longitude_of_projection_origin = ( + cs.longitude_of_projection_origin) + cf_var_grid.latitude_of_projection_origin = ( + cs.latitude_of_projection_origin) + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + cf_var_grid.perspective_point_height = ( + cs.perspective_point_height) + + # geostationary + elif isinstance(cs, + iris.coord_systems.Geostationary): + if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.longitude_of_projection_origin = ( + cs.longitude_of_projection_origin) + cf_var_grid.latitude_of_projection_origin = ( + cs.latitude_of_projection_origin) + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + cf_var_grid.perspective_point_height = ( + 
cs.perspective_point_height) + cf_var_grid.sweep_angle_axis = cs.sweep_angle_axis + # other else: warnings.warn('Unable to represent the horizontal ' diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 2b26b901a2..6cc54a61aa 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -619,17 +619,17 @@ def __init__(self, shape, src_dtype, path, offset, data_len, self.mdi = mdi # lbpack - def _lbpack_setter(self, value): - self._lbpack = value - - def _lbpack_getter(self): + @property + def lbpack(self): value = self._lbpack if not isinstance(self._lbpack, SplittableInt): mapping = dict(n5=slice(4, None), n4=3, n3=2, n2=1, n1=0) value = SplittableInt(self._lbpack, mapping) return value - lbpack = property(_lbpack_getter, _lbpack_setter) + @lbpack.setter + def lbpack(self, value): + self._lbpack = value @property def dtype(self): @@ -1004,17 +1004,25 @@ def lbtim(self, value): 'ic': 0}) # lbcode - def _lbcode_setter(self, new_value): + @property + def lbcode(self): + return self._lbcode + + @lbcode.setter + def lbcode(self, new_value): if not isinstance(new_value, SplittableInt): # add the ix/iy values for lbcode new_value = SplittableInt(new_value, {'iy': slice(0, 2), 'ix': slice(2, 4)}) self._lbcode = new_value - lbcode = property(lambda self: self._lbcode, _lbcode_setter) - # lbpack - def _lbpack_setter(self, new_value): + @property + def lbpack(self): + return self._lbpack + + @lbpack.setter + def lbpack(self, new_value): if not isinstance(new_value, SplittableInt): self.raw_lbpack = new_value # add the n1/n2/n3/n4/n5 values for lbpack @@ -1024,8 +1032,6 @@ def _lbpack_setter(self, new_value): self.raw_lbpack = new_value._value self._lbpack = new_value - lbpack = property(lambda self: self._lbpack, _lbpack_setter) - @property def lbproc(self): return self._lbproc @@ -1438,13 +1444,20 @@ class PPField2(PPField): __slots__ = _pp_attribute_names(HEADER_DEFN) - def _get_t1(self): + @property + def t1(self): + """ + A 
cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + and lbmin attributes. + + """ if not hasattr(self, '_t1'): self._t1 = cftime.datetime(self.lbyr, self.lbmon, self.lbdat, self.lbhr, self.lbmin) return self._t1 - def _set_t1(self, dt): + @t1.setter + def t1(self, dt): self.lbyr = dt.year self.lbmon = dt.month self.lbdat = dt.day @@ -1454,18 +1467,21 @@ def _set_t1(self, dt): if hasattr(self, '_t1'): delattr(self, '_t1') - t1 = property(_get_t1, _set_t1, None, - "A cftime.datetime object consisting of the lbyr, lbmon," - " lbdat, lbhr, and lbmin attributes.") + @property + def t2(self): + """ + A cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + lbhrd, and lbmind attributes. - def _get_t2(self): + """ if not hasattr(self, '_t2'): self._t2 = cftime.datetime(self.lbyrd, self.lbmond, self.lbdatd, self.lbhrd, self.lbmind) return self._t2 - def _set_t2(self, dt): + @t2.setter + def t2(self, dt): self.lbyrd = dt.year self.lbmond = dt.month self.lbdatd = dt.day @@ -1475,10 +1491,6 @@ def _set_t2(self, dt): if hasattr(self, '_t2'): delattr(self, '_t2') - t2 = property(_get_t2, _set_t2, None, - "A cftime.datetime object consisting of the lbyrd, " - "lbmond, lbdatd, lbhrd, and lbmind attributes.") - class PPField3(PPField): """ @@ -1491,13 +1503,20 @@ class PPField3(PPField): __slots__ = _pp_attribute_names(HEADER_DEFN) - def _get_t1(self): + @property + def t1(self): + """ + A cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + lbmin, and lbsec attributes. 
+ + """ if not hasattr(self, '_t1'): self._t1 = cftime.datetime(self.lbyr, self.lbmon, self.lbdat, self.lbhr, self.lbmin, self.lbsec) return self._t1 - def _set_t1(self, dt): + @t1.setter + def t1(self, dt): self.lbyr = dt.year self.lbmon = dt.month self.lbdat = dt.day @@ -1507,18 +1526,21 @@ def _set_t1(self, dt): if hasattr(self, '_t1'): delattr(self, '_t1') - t1 = property(_get_t1, _set_t1, None, - "A cftime.datetime object consisting of the lbyr, lbmon," - " lbdat, lbhr, lbmin, and lbsec attributes.") + @property + def t2(self): + """ + A cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + lbhrd, lbmind, and lbsecd attributes. - def _get_t2(self): + """ if not hasattr(self, '_t2'): self._t2 = cftime.datetime(self.lbyrd, self.lbmond, self.lbdatd, self.lbhrd, self.lbmind, self.lbsecd) return self._t2 - def _set_t2(self, dt): + @t2.setter + def t2(self, dt): self.lbyrd = dt.year self.lbmond = dt.month self.lbdatd = dt.day @@ -1528,10 +1550,6 @@ def _set_t2(self, dt): if hasattr(self, '_t2'): delattr(self, '_t2') - t2 = property(_get_t2, _set_t2, None, - "A cftime.datetime object consisting of the lbyrd, " - "lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes.") - PP_CLASSES = { 2: PPField2, @@ -2079,7 +2097,9 @@ def save(cube, target, append=False, field_coords=None): If None, the final two dimensions are chosen for slicing. - See also :func:`iris.io.save`. + See also :func:`iris.io.save`. Note that :func:`iris.save` is the preferred + method of saving. This allows a :class:`iris.cube.CubeList` or a sequence + of cubes to be saved to a PP file. """ fields = as_fields(cube, field_coords, target) diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index cc568d7ccf..3fafa4df80 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2018, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. 
# @@ -16,11 +16,11 @@ # along with Iris. If not, see . # # DO NOT EDIT: AUTO-GENERATED -# Created on 01 June 2018 13:06 from +# Created on 22 May 2019 12:05 from # http://www.metarelate.net/metOcean -# at commit c07efb7ba0442332cbd5514c9d661c4f84a635c6 +# at commit a3649e942198f014f0258d926678dbbff616feae -# https://github.com/metarelate/metOcean/commit/c07efb7ba0442332cbd5514c9d661c4f84a635c6 +# https://github.com/metarelate/metOcean/commit/a3649e942198f014f0258d926678dbbff616feae """ Provides UM/CF phenomenon translations. @@ -720,7 +720,7 @@ 'm01s16i202': CFName('geopotential_height', None, 'm'), 'm01s16i203': CFName('air_temperature', None, 'K'), 'm01s16i204': CFName('relative_humidity', None, '%'), - 'm01s16i205': CFName(None, 'wet_bulb_potential_temperature', 'K'), + 'm01s16i205': CFName('wet_bulb_potential_temperature', None, 'K'), 'm01s16i222': CFName('air_pressure_at_sea_level', None, 'Pa'), 'm01s16i224': CFName(None, 'square_of_height', 'm2'), 'm01s16i255': CFName('geopotential_height', None, 'm'), diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index 1eafcefe1b..c96fbfa6f6 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2015, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -25,7 +25,7 @@ import matplotlib.pyplot as plt fagent = fp.FormatAgent() png_spec = fp.FormatSpecification('PNG image', fp.MagicNumber(8), - 0x89504E470D0A1A0A, + 0x89504E470D0A1A0A, handler=lambda filename: plt.imread(filename), priority=5 ) @@ -55,7 +55,10 @@ from six.moves import (filter, input, map, range, zip) # noqa import six -import collections +try: # Python 3 + from collections.abc import Callable +except ImportError: # Python 2.7 + from collections import Callable import functools import os import struct @@ -126,12 +129,12 @@ def get_spec(self, basename, buffer_obj): if buffer_obj is not None and buffer_obj.tell() != 0: # reset the buffer if tell != 0 buffer_obj.seek(0) - + element_cache[repr(fmt_elem)] = \ fmt_elem.get_element(basename, buffer_obj) # If we have a callable object, then call it and tests its result, otherwise test using basic equality - if isinstance(fmt_elem_value, collections.Callable): + if isinstance(fmt_elem_value, Callable): matches = fmt_elem_value(element_cache[repr(fmt_elem)]) elif element_cache[repr(fmt_elem)] == fmt_elem_value: matches = True @@ -255,14 +258,14 @@ def __init__(self, requires_fh=True): """ self.requires_fh = requires_fh - + def get_element(self, basename, file_handle): """Called when identifying the element of a file that this FileElement is representing.""" raise NotImplementedError("get_element must be defined in a subclass") - + def __hash__(self): return hash(repr(self)) - + def __repr__(self): return '{}()'.format(self.__class__.__name__) diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index 9828d8670d..37adebc1f5 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2015, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -22,7 +22,10 @@ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa -import collections +try: # Python 3 + from collections.abc import Iterator +except ImportError: # Python 2.7 + from collections import Iterator import itertools import warnings @@ -167,7 +170,7 @@ def izip(*cubes, **kwargs): coords_by_cube) -class _ZipSlicesIterator(collections.Iterator): +class _ZipSlicesIterator(Iterator): """ Extension to _SlicesIterator (see cube.py) to support iteration over a collection of cubes in step. diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 84fd665eab..8091757d75 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2015, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -197,10 +197,12 @@ def _update(self, val, update_min=True, update_max=True): if update_max: self._vmax = self.pivot + diff - def _get_vmin(self): + @property + def vmin(self): return getattr(self, '_vmin') - def _set_vmin(self, val): + @vmin.setter + def vmin(self, val): if val is None: self._vmin = None elif self._vmax is None: @@ -210,12 +212,12 @@ def _set_vmin(self, val): # Set both _vmin and _vmax from value self._update(val) - vmin = property(_get_vmin, _set_vmin) - - def _get_vmax(self): + @property + def vmax(self): return getattr(self, '_vmax') - def _set_vmax(self, val): + @vmax.setter + def vmax(self, val): if val is None: self._vmax = None elif self._vmin is None: @@ -225,8 +227,6 @@ def _set_vmax(self, val): # Set both _vmin and _vmax from value self._update(val) - vmax = property(_get_vmax, _set_vmax) - def _load_palette(): """ diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 88f89cc79a..dff7f1480d 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # 
# This file is part of Iris. # @@ -741,7 +741,7 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): _string_coord_axis_tick_labels(string_axes, axes) # Invert y-axis if necessary. - _invert_yaxis(v_object) + _invert_yaxis(v_object, axes) return result @@ -821,6 +821,21 @@ def _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs): return new_kwargs +def _check_geostationary_coords_and_convert(x, y, kwargs): + # Geostationary stores projected coordinates as scanning angles ( + # radians), in line with CF definition (this behaviour is unique to + # Geostationary). Before plotting, must be converted by multiplying by + # satellite height. + x, y = (i.copy() for i in (x, y)) + transform = kwargs.get('transform') + if isinstance(transform, cartopy.crs.Geostationary): + satellite_height = transform.proj4_params['h'] + for i in (x, y): + i *= satellite_height + + return x, y + + def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwargs): """ @@ -873,12 +888,19 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, y = np.append(y, y[:, 0:1], axis=1) x = np.append(x, x[:, 0:1] + 360 * direction, axis=1) data = ma.concatenate([data, data[:, 0:1]], axis=1) + if '_v_data' in kwargs: + v_data = kwargs['_v_data'] + v_data = ma.concatenate([v_data, v_data[:, 0:1]], axis=1) + kwargs['_v_data'] = v_data # Replace non-cartopy subplot/axes with a cartopy alternative and set the # transform keyword. kwargs = _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs) + # Make Geostationary coordinates plot-able. 
+ x, y = _check_geostationary_coords_and_convert(x, y, kwargs) + if arg_func is not None: new_args, kwargs = arg_func(x, y, data, *args, **kwargs) else: diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 5b760e1447..0ce09b6ef1 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -362,6 +362,12 @@ def assertCDL(self, netcdf_filename, reference_filename=None, flags='-h'): lines = cdl.decode('ascii').splitlines() lines = lines[1:] + # Ignore any lines of the general form "... :_NCProperties = ..." + # (an extra global attribute, displayed by older versions of ncdump). + re_ncprop = re.compile('^\s*:_NCProperties *=') + lines = [line for line in lines + if not re_ncprop.match(line)] + # Sort the dimensions (except for the first, which can be unlimited). # This gives consistent CDL across different platforms. sort_key = lambda line: ('UNLIMITED' not in line, line) diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 497ff3549d..7785cc65ed 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -167,6 +167,25 @@ def test_fail_unsupported_coord_system(self): self.plot('2d_rotated', u_cube, v_cube, coords=('longitude', 'latitude')) + def test_circular_longitude(self): + # Test circular longitude does not cause a crash. 
+ res = 5 + lat = DimCoord(np.arange(-90, 91, res), 'latitude', + units='degrees_north') + lon = DimCoord(np.arange(0, 360, res), 'longitude', + units='degrees_east', circular=True) + nlat = len(lat.points) + nlon = len(lon.points) + u_arr = np.ones((nlat, nlon)) + v_arr = np.ones((nlat, nlon)) + u_cube = Cube(u_arr, dim_coords_and_dims=[(lat, 0), (lon, 1)], + standard_name='eastward_wind') + v_cube = Cube(v_arr, dim_coords_and_dims=[(lat, 0), (lon, 1)], + standard_name='northward_wind') + + self.plot('circular', u_cube, v_cube, + coords=('longitude', 'latitude')) + class TestQuiver(MixinVectorPlotCases, tests.GraphicsTest): def setUp(self): diff --git a/lib/iris/tests/integration/test_climatology.py b/lib/iris/tests/integration/test_climatology.py new file mode 100644 index 0000000000..3e896dd273 --- /dev/null +++ b/lib/iris/tests/integration/test_climatology.py @@ -0,0 +1,119 @@ +# (C) British Crown Copyright 2014 - 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +"""Integration tests for loading and saving netcdf files.""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +from os.path import join as path_join, dirname, sep as os_sep +import shutil +from subprocess import check_call +import tempfile + +import iris +from iris.tests import stock + + +class TestClimatology(iris.tests.IrisTest): + reference_cdl_path = os_sep.join([ + dirname(tests.__file__), + ('results/integration/climatology/TestClimatology/' + 'reference_simpledata.cdl')]) + + @classmethod + def _simple_cdl_string(cls): + with open(cls.reference_cdl_path, 'r') as f: + cdl_content = f.read() + # Add the expected CDL first line since this is removed from the + # stored results file. + cdl_content = 'netcdf {\n' + cdl_content + + return cdl_content + + @staticmethod + def _load_sanitised_cube(filepath): + cube = iris.load_cube(filepath) + # Remove attributes convention, if any. + cube.attributes.pop('Conventions', None) + # Remove any var-names. + for coord in cube.coords(): + coord.var_name = None + cube.var_name = None + return cube + + @classmethod + def setUpClass(cls): + # Create a temp directory for temp files. + cls.temp_dir = tempfile.mkdtemp() + cls.path_ref_cdl = path_join(cls.temp_dir, 'standard.cdl') + cls.path_ref_nc = path_join(cls.temp_dir, 'standard.nc') + # Create reference CDL file. + with open(cls.path_ref_cdl, 'w') as f_out: + f_out.write(cls._simple_cdl_string()) + # Create reference netCDF file from reference CDL. + command = 'ncgen -o {} {}'.format( + cls.path_ref_nc, cls.path_ref_cdl) + check_call(command, shell=True) + cls.path_temp_nc = path_join(cls.temp_dir, 'tmp.nc') + + # Create reference cube. + cls.cube_ref = stock.climatology_3d() + + @classmethod + def tearDownClass(cls): + # Destroy a temp directory for temp files. + shutil.rmtree(cls.temp_dir) + +############################################################################### + # Round-trip tests + + def test_cube_to_cube(self): + # Save reference cube to file, load cube from same file, test against + # reference cube. 
+ iris.save(self.cube_ref, self.path_temp_nc) + cube = self._load_sanitised_cube(self.path_temp_nc) + self.assertEqual(cube, self.cube_ref) + + def test_file_to_file(self): + # Load cube from reference file, save same cube to file, test against + # reference CDL. + cube = iris.load_cube(self.path_ref_nc) + iris.save(cube, self.path_temp_nc) + self.assertCDL( + self.path_temp_nc, + reference_filename=self.reference_cdl_path, + flags='') + + # NOTE: + # The saving half of the round-trip tests is tested in the + # appropriate dedicated test class: + # unit.fileformats.netcdf.test_Saver.Test_write.test_with_climatology . + # The loading half has no equivalent dedicated location, so is tested + # here as test_load_from_file. + + def test_load_from_file(self): + # Create cube from file, test against reference cube. + cube = self._load_sanitised_cube(self.path_ref_nc) + self.assertEqual(cube, self.cube_ref) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py index 5351e615d8..4894de292c 100644 --- a/lib/iris/tests/integration/test_netcdf.py +++ b/lib/iris/tests/integration/test_netcdf.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2014 - 2017, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. # @@ -18,6 +18,7 @@ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa +import six # Import iris.tests first so that some things can be initialised before # importing anything else. 
@@ -273,6 +274,101 @@ def test_print(self): ' x x') in printed) +@tests.skip_data +class TestCMIP6VolcelloLoad(tests.IrisTest): + def setUp(self): + self.fname = tests.get_data_path( + ('NetCDF', 'volcello', + 'volcello_Ofx_CESM2_deforest-globe_r1i1p1f1_gn.nc')) + + def test_cmip6_volcello_load_issue_3367(self): + # Ensure that reading a file which references itself in + # `cell_measures` can be read. At the same time, ensure that we + # still receive a warning about other variables mentioned in + # `cell_measures` i.e. a warning should be raised about missing + # areacello. + areacello_str = "areacello" if six.PY3 else u"areacello" + volcello_str = "volcello" if six.PY3 else u"volcello" + expected_msg = "Missing CF-netCDF measure variable %r, " \ + "referenced by netCDF variable %r" \ + % (areacello_str, volcello_str) + + with mock.patch('warnings.warn') as warn: + # ensure file loads without failure + cube = iris.load_cube(self.fname) + warn.assert_has_calls([mock.call(expected_msg)]) + + # extra check to ensure correct variable was found + assert cube.standard_name == 'ocean_volume' + + +class TestSelfReferencingVarLoad(tests.IrisTest): + def setUp(self): + self.temp_dir_path = os.path.join(tempfile.mkdtemp(), + 'issue_3367_volcello_test_file.nc') + dataset = nc.Dataset(self.temp_dir_path, 'w') + + dataset.createDimension('lat', 4) + dataset.createDimension('lon', 5) + dataset.createDimension('lev', 3) + + latitudes = dataset.createVariable('lat', np.float64, ('lat',)) + longitudes = dataset.createVariable('lon', np.float64, ('lon',)) + levels = dataset.createVariable('lev', np.float64, ('lev',)) + volcello = dataset.createVariable('volcello', np.float32, + ('lat', 'lon', 'lev')) + + latitudes.standard_name = 'latitude' + latitudes.units = 'degrees_north' + latitudes.axis = 'Y' + latitudes[:] = np.linspace(-90, 90, 4) + + longitudes.standard_name = 'longitude' + longitudes.units = 'degrees_east' + longitudes.axis = 'X' + longitudes[:] = np.linspace(0, 360, 5) + 
+ levels.standard_name = 'olevel' + levels.units = 'centimeters' + levels.positive = 'down' + levels.axis = 'Z' + levels[:] = np.linspace(0, 10**5, 3) + + volcello.id = 'volcello' + volcello.out_name = 'volcello' + volcello.standard_name = 'ocean_volume' + volcello.units = 'm3' + volcello.realm = 'ocean' + volcello.frequency = 'fx' + volcello.cell_measures = 'area: areacello volume: volcello' + volcello = np.arange(4*5*3).reshape((4, 5, 3)) + + dataset.close() + + def test_self_referencing_load_issue_3367(self): + # Ensure that reading a file which references itself in + # `cell_measures` can be read. At the same time, ensure that we + # still receive a warning about other variables mentioned in + # `cell_measures` i.e. a warning should be raised about missing + # areacello. + areacello_str = "areacello" if six.PY3 else u"areacello" + volcello_str = "volcello" if six.PY3 else u"volcello" + expected_msg = "Missing CF-netCDF measure variable %r, " \ + "referenced by netCDF variable %r" \ + % (areacello_str, volcello_str) + + with mock.patch('warnings.warn') as warn: + # ensure file loads without failure + cube = iris.load_cube(self.temp_dir_path) + warn.assert_called_with(expected_msg) + + # extra check to ensure correct variable was found + assert cube.standard_name == 'ocean_volume' + + def tearDown(self): + os.remove(self.temp_dir_path) + + class TestCellMethod_unknown(tests.IrisTest): def test_unknown_method(self): cube = Cube([1, 2], long_name='odd_phenomenon') @@ -429,5 +525,15 @@ def test_scalar_cube_save_load(self): self.assertEqual(scalar_cube.name(), 'scalar_cube') +class TestStandardName(tests.IrisTest): + def test_standard_name_roundtrip(self): + standard_name = 'air_temperature detection_minimum' + cube = iris.cube.Cube(1, standard_name=standard_name) + with self.temp_filename(suffix='.nc') as fout: + iris.save(cube, fout) + detection_limit_cube = iris.load_cube(fout) + self.assertEqual(detection_limit_cube.standard_name, standard_name) + + if __name__ 
== "__main__": tests.main() diff --git a/lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt b/lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt deleted file mode 100644 index 9e1bfa95bf..0000000000 --- a/lib/iris/tests/results/PP/extra_char_data.w_data_loaded.pp.txt +++ /dev/null @@ -1,641 +0,0 @@ -[PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27870 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 56 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 5 - lblev: 1 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 897024, 0, 2, 0, 0, 1) - brsvd: (20.000338, 0.9977165, 0.0, 0.0) - bdatum: 0.0 - bacc: -12.0 - blev: 9.998206 - brlev: 0.0 - bhlev: 0.99885815 - bhrlev: 1.0 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -0.9375 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[ 0.8562012 0.9094238 0.9614258 ... 0.6916504 0.74731445 - 0.8022461 ] - [-0.29174805 -0.2397461 -0.18725586 ... -0.36645508 -0.34594727 - -0.32763672] - [-0.76000977 -0.6833496 -0.6347656 ... -0.9243164 -0.8911133 - -0.7675781 ] - ... - [-4.647461 -4.7456055 -4.8171387 ... -4.3222656 -4.428955 - -4.536133 ] - [-4.4577637 -4.5183105 -4.580078 ... -4.283203 -4.350342 - -4.4038086 ] - [-4.2226562 -4.284668 -4.342041 ... 
-4.01001 -4.085205 - -4.15625 ]] - field_title: AJHQA Time mean !C Atmos u compnt of wind after timestep at 9.998 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27678 - lbcode: 1 - lbhem: 0 - lbrow: 144 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 57 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 6 - lblev: 1 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1208320, 0, 3, 0, 0, 1) - brsvd: (20.000338, 0.9977165, 0.0, 0.0) - bdatum: 0.0 - bacc: -12.0 - blev: 9.998206 - brlev: 0.0 - bhlev: 0.99885815 - bhrlev: 1.0 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -90.625 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[-1.2304688 -1.2202148 -1.2077637 ... -1.2546387 -1.246582 - -1.2387695 ] - [-1.0026855 -1.0119629 -1.0195312 ... -0.9663086 -0.9802246 - -0.9904785 ] - [-0.76538086 -0.8845215 -1.0141602 ... -0.72143555 -0.7011719 - -0.71118164] - ... - [-2.1013184 -1.9470215 -1.7893066 ... -2.564209 -2.4177246 - -2.2590332 ] - [-2.0922852 -1.9360352 -1.7756348 ... -2.5288086 -2.3864746 - -2.2421875 ] - [-2.0959473 -1.9523926 -1.8071289 ... 
-2.5092773 -2.3747559 - -2.2368164 ]] - field_title: AJHQA Time mean !C Atmos v compnt of wind after timestep at 9.998 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27867 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 27 - lbpack: 0 - lbrel: 2 - lbfc: 19 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 1 - lblev: 1 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1519616, 0, 4, 0, 0, 1) - brsvd: (49.998882, 0.99429625, 0.0, 0.0) - bdatum: 0.0 - bacc: -10.0 - blev: 20.000338 - brlev: 0.0 - bhlev: 0.9977165 - bhrlev: 1.0 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[282.4619 282.4619 282.4619 ... 282.4619 282.4619 282.4619 ] - [282.3506 282.37598 282.40234 ... 282.27344 282.29883 282.3252 ] - [281.95508 282.03418 282.10938 ... 281.7578 281.81348 281.87988] - ... - [245.83203 245.84277 245.83398 ... 245.82031 245.82129 245.82324] - [244.42969 244.4248 244.42383 ... 244.45312 244.45215 244.44043] - [243.26758 243.26758 243.26758 ... 
243.26758 243.26758 243.26758]] - field_title: AJHQA Time mean !C Atmos theta after timestep at 20.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27870 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 95 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 13 - lblev: 1 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1789952, 0, 10, 0, 0, 1) - brsvd: (49.998882, 0.99429625, 0.0, 0.0) - bdatum: 0.0 - bacc: -99.0 - blev: 20.000338 - brlev: 0.0 - bhlev: 0.9977165 - bhrlev: 1.0 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[0.00079939 0.00079939 0.00079939 ... 0.00079939 0.00079939 0.00079939] - [0.00087261 0.00087106 0.00086934 ... 0.00087724 0.00087613 0.00087428] - [0.00093523 0.00092579 0.00091752 ... 0.00095657 0.00094989 0.00094373] - ... - [0.00037911 0.0003811 0.00038037 ... 0.00037897 0.00037865 0.0003793 ] - [0.00033554 0.0003354 0.00033541 ... 0.0003389 0.00033855 0.00033566] - [0.00030907 0.00030907 0.00030907 ... 
0.00030907 0.00030907 0.00030907]] - field_title: AJHQA Time mean !C Atmos specific humidity after timestep at 20.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27870 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 56 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 5 - lblev: 2 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 905216, 0, 2, 0, 0, 1) - brsvd: (80.00135, 0.9908815, 0.0, 0.0) - bdatum: 0.0 - bacc: -12.0 - blev: 49.998882 - brlev: 20.000338 - bhlev: 0.99429625 - bhrlev: 0.9977165 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -0.9375 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[ 1.0332031 1.0991211 1.1638184 ... 0.82910156 0.89819336 - 0.96606445] - [-0.46777344 -0.41455078 -0.35766602 ... -0.5932617 -0.5517578 - -0.51293945] - [-1.072998 -1.005127 -0.9387207 ... -1.3034668 -1.2263184 - -1.1523438 ] - ... - [-5.9941406 -6.099365 -6.1816406 ... -5.6379395 -5.7575684 - -5.8745117 ] - [-5.8913574 -5.9609375 -6.027832 ... -5.675537 -5.7558594 - -5.8239746 ] - [-5.727051 -5.7910156 -5.848633 ... 
-5.4992676 -5.581299 - -5.6572266 ]] - field_title: AJHQA Time mean !C Atmos u compnt of wind after timestep at 50.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27678 - lbcode: 1 - lbhem: 0 - lbrow: 144 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 57 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 6 - lblev: 2 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1216512, 0, 3, 0, 0, 1) - brsvd: (80.00135, 0.9908815, 0.0, 0.0) - bdatum: 0.0 - bacc: -12.0 - blev: 49.998882 - brlev: 20.000338 - bhlev: 0.99429625 - bhrlev: 0.9977165 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -90.625 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[-1.5361328 -1.5249023 -1.5117188 ... -1.5610352 -1.5537109 - -1.5454102 ] - [-1.2714844 -1.2890625 -1.3078613 ... -1.2194824 -1.2355957 - -1.2526855 ] - [-1.0349121 -1.1855469 -1.3476562 ... -0.96240234 -0.94018555 - -0.9621582 ] - ... - [-2.333252 -2.1430664 -1.9562988 ... -2.888916 -2.708252 - -2.5219727 ] - [-2.2441406 -2.0427246 -1.8383789 ... -2.8112793 -2.6252441 - -2.4382324 ] - [-2.1965332 -2.0041504 -1.809082 ... 
-2.755127 -2.5720215 - -2.3859863 ]] - field_title: AJHQA Time mean !C Atmos v compnt of wind after timestep at 50.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27867 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 27 - lbpack: 0 - lbrel: 2 - lbfc: 19 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 1 - lblev: 2 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1527808, 0, 4, 0, 0, 1) - brsvd: (130.00023, 0.98520386, 0.0, 0.0) - bdatum: 0.0 - bacc: -10.0 - blev: 80.00135 - brlev: 49.998882 - bhlev: 0.9908815 - bhrlev: 0.99429625 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[282.4961 282.4961 282.4961 ... 282.4961 282.4961 282.4961 ] - [282.38672 282.4121 282.4375 ... 282.31152 282.33594 282.36133] - [282.0957 282.16992 282.2422 ... 281.9121 281.96582 282.02734] - ... - [246.62598 246.63086 246.625 ... 246.59863 246.60938 246.61816] - [245.46387 245.46582 245.4707 ... 245.45703 245.46191 245.46387] - [244.5625 244.5625 244.5625 ... 
244.5625 244.5625 244.5625 ]] - field_title: AJHQA Time mean !C Atmos theta after timestep at 80.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27870 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 95 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 13 - lblev: 2 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1818624, 0, 10, 0, 0, 1) - brsvd: (130.00023, 0.98520386, 0.0, 0.0) - bdatum: 0.0 - bacc: -99.0 - blev: 80.00135 - brlev: 49.998882 - bhlev: 0.9908815 - bhrlev: 0.99429625 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[0.00077913 0.00077913 0.00077913 ... 0.00077913 0.00077913 0.00077913] - [0.00085118 0.0008495 0.00084755 ... 0.00085498 0.00085392 0.00085248] - [0.00091165 0.00090317 0.00089486 ... 0.00092995 0.00092435 0.00091926] - ... - [0.00038609 0.00038648 0.00038594 ... 0.00038624 0.00038616 0.00038646] - [0.00034904 0.00034909 0.0003492 ... 0.0003502 0.00035007 0.00034903] - [0.00032891 0.00032891 0.00032891 ... 
0.00032891 0.00032891 0.00032891]] - field_title: AJHQA Time mean !C Atmos specific humidity after timestep at 80.00 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27870 - lbcode: 1 - lbhem: 0 - lbrow: 145 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 56 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 5 - lblev: 3 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 913408, 0, 2, 0, 0, 1) - brsvd: (179.99911, 0.97954255, 0.0, 0.0) - bdatum: 0.0 - bacc: -12.0 - blev: 130.00023 - brlev: 80.00135 - bhlev: 0.98520386 - bhrlev: 0.9908815 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -91.25 - bdy: 1.25 - bzx: -0.9375 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[ 1.0524902 1.1252441 1.1967773 ... 0.8273926 0.90356445 - 0.9785156 ] - [-0.6694336 -0.61328125 -0.5529785 ... -0.8195801 -0.7685547 - -0.72021484] - [-1.3225098 -1.2358398 -1.1459961 ... -1.5771484 -1.4953613 - -1.4130859 ] - ... - [-6.96875 -7.027832 -7.0776367 ... -6.741455 -6.8256836 - -6.900879 ] - [-7.010498 -7.0480957 -7.0776367 ... -6.8447266 -6.9067383 - -6.963135 ] - [-6.9716797 -7.010254 -7.04126 ... 
-6.8120117 -6.8725586 - -6.9257812 ]] - field_title: AJHQA Time mean !C Atmos u compnt of wind after timestep at 130.0 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -, PP Field - lbyr: 2007 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 336 - lbyrd: 2008 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 121 - lbft: 26280 - lblrec: 27678 - lbcode: 1 - lbhem: 0 - lbrow: 144 - lbnpt: 192 - lbext: 30 - lbpack: 0 - lbrel: 2 - lbfc: 57 - lbcfc: 0 - lbproc: 128 - lbvc: 65 - lbrvc: 0 - lbexp: 2388992 - lbegin: 0 - lbnrec: 0 - lbproj: 802 - lbtyp: 6 - lblev: 3 - lbrsvd: (0, 0, 0, 0) - lbsrce: 6061111 - lbuser: (1, 1224704, 0, 3, 0, 0, 1) - brsvd: (179.99911, 0.97954255, 0.0, 0.0) - bdatum: 0.0 - bacc: -12.0 - blev: 130.00023 - brlev: 80.00135 - bhlev: 0.98520386 - bhrlev: 0.9908815 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: -90.625 - bdy: 1.25 - bzx: -1.875 - bdx: 1.875 - bmdi: -1073741800.0 - bmks: 1.0 - data: [[-1.7414551 -1.7321777 -1.7211914 ... -1.7590332 -1.7546387 -1.7485352] - [-1.482666 -1.5065918 -1.5327148 ... -1.4162598 -1.4372559 -1.4589844] - [-1.3601074 -1.5227051 -1.6989746 ... -1.2714844 -1.2514648 -1.2753906] - ... - [-1.7216797 -1.4643555 -1.2097168 ... -2.4348145 -2.1984863 -1.9648438] - [-1.529541 -1.295166 -1.0639648 ... -2.2402344 -2.0048828 -1.7670898] - [-1.4748535 -1.2502441 -1.0231934 ... 
-2.137207 -1.9177246 -1.6970215]] - field_title: AJHQA Time mean !C Atmos v compnt of wind after timestep at 130.0 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00 -] \ No newline at end of file diff --git a/lib/iris/tests/results/abf/load.cml b/lib/iris/tests/results/abf/load.cml index 25be488650..e470cbebf3 100644 --- a/lib/iris/tests/results/abf/load.cml +++ b/lib/iris/tests/results/abf/load.cml @@ -1,6 +1,6 @@ - + diff --git a/lib/iris/tests/results/analysis/interpolation/linear/circular_vs_non_circular.cml b/lib/iris/tests/results/analysis/interpolation/linear/circular_vs_non_circular.cml deleted file mode 100644 index 594095f50b..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/circular_vs_non_circular.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/positive b/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/positive deleted file mode 100644 index 6654ef46a4..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/positive +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/positive.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/positive.data.0.json deleted file mode 100644 index 9bb2969bbd..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/positive.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 0.33973028517732512, "min": 0.0, "max": 1.1000000000000001, "shape": [3, 24], "masked": false, "mean": 0.55000000000000004} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/symmetric b/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/symmetric deleted file mode 100644 index 6654ef46a4..0000000000 --- 
a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/symmetric +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/symmetric.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/symmetric.data.0.json deleted file mode 100644 index 9bb2969bbd..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/circular_wrapping/symmetric.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 0.33973028517732512, "min": 0.0, "max": 1.1000000000000001, "shape": [3, 24], "masked": false, "mean": 0.55000000000000004} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/dim_to_aux.cml b/lib/iris/tests/results/analysis/interpolation/linear/dim_to_aux.cml deleted file mode 100644 index 5efc312a0f..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/dim_to_aux.cml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/real_2dslice.cml b/lib/iris/tests/results/analysis/interpolation/linear/real_2dslice.cml deleted file mode 100644 index 63703be654..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/real_2dslice.cml +++ /dev/null @@ -1,143 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/real_2slices.cml b/lib/iris/tests/results/analysis/interpolation/linear/real_2slices.cml deleted file mode 100644 index af035e9393..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/real_2slices.cml +++ /dev/null @@ -1,143 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/real_circular_2dslice.cml 
b/lib/iris/tests/results/analysis/interpolation/linear/real_circular_2dslice.cml deleted file mode 100644 index 2a9faedb3d..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/real_circular_2dslice.cml +++ /dev/null @@ -1,143 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_casting_datatype.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_casting_datatype.cml deleted file mode 100644 index ccd109ca9a..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_casting_datatype.cml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation.cml deleted file mode 100644 index fc5140841e..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation_multipoint1.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation_multipoint1.cml deleted file mode 100644 index 255fe34d7f..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation_multipoint1.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation_multipoint2.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation_multipoint2.cml deleted file mode 100644 index 46dfe870b8..0000000000 --- 
a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_linear_extrapolation_multipoint2.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_nan_extrapolation.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_nan_extrapolation.cml deleted file mode 100644 index 15ac105a91..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_coord_nan_extrapolation.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_coords.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_coords.cml deleted file mode 100644 index 03657be64c..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_coords.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_coords_extrapolation.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_coords_extrapolation.cml deleted file mode 100644 index 2f453b1330..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_coords_extrapolation.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_points.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_points.cml deleted file mode 100644 index 248848e38b..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_points.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_points_circular.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_points_circular.cml deleted file mode 100644 index 
298bdf0f98..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_multiple_points_circular.cml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_shared_axis.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_shared_axis.cml deleted file mode 100644 index d4fd63d4dc..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_shared_axis.cml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/simple_single_point.cml b/lib/iris/tests/results/analysis/interpolation/linear/simple_single_point.cml deleted file mode 100644 index bf9f37d8a2..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/simple_single_point.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_0 b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_0 deleted file mode 100644 index ace42c84f8..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_0 +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_0.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_0.data.0.json deleted file mode 100644 index 526e4b6aab..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_0.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.2659863237109041, "min": 2.0, "max": 10.0, "shape": [3, 4], "masked": false, "mean": 6.0} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_1 b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_1 deleted file mode 100644 index 6e5a38c646..0000000000 --- 
a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_1 +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_1.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_1.data.0.json deleted file mode 100644 index b22387080a..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_1.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 1.1180339887498949, "min": 4.0, "max": 7.0, "shape": [4, 4], "masked": false, "mean": 5.5} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_nan b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_nan deleted file mode 100644 index fc9dc99eef..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_nan +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_nan.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_nan.data.0.json deleted file mode 100644 index c3bc9b5673..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_nan.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": NaN, "min": NaN, "max": NaN, "shape": [3, 3], "masked": false, "mean": NaN} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_same b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_same deleted file mode 100644 index 270ad5746b..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_same +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_same.data.0.json 
b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_same.data.0.json deleted file mode 100644 index e52d7576da..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_many_same.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.2659863237109041, "min": 2.0, "max": 10.0, "shape": [3, 3], "masked": false, "mean": 6.0} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_same_pt b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_same_pt deleted file mode 100644 index a16308e53f..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_same_pt +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_same_pt.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_same_pt.data.0.json deleted file mode 100644 index 083821afc1..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_same_pt.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.2659863237109041, "min": 2.0, "max": 10.0, "shape": [3, 1], "masked": false, "mean": 6.0} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_0 b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_0 deleted file mode 100644 index fe1edd72e7..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_0 +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_0.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_0.data.0.json deleted file mode 100644 index 6b64ed5556..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_0.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 
3.2659863237109041, "min": 2.0, "max": 10.0, "shape": [3], "masked": false, "mean": 6.0} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_1 b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_1 deleted file mode 100644 index 7e8e447cc1..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_1 +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_1.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_1.data.0.json deleted file mode 100644 index 7d9a7486b6..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_1.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 1.1180339887498949, "min": 4.0, "max": 7.0, "shape": [4], "masked": false, "mean": 5.5} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_nan b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_nan deleted file mode 100644 index 48fdd173da..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_nan +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_nan.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_nan.data.0.json deleted file mode 100644 index 28e588dc39..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_scalar_nan.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": NaN, "min": NaN, "max": NaN, "shape": [3], "masked": false, "mean": NaN} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_0 
b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_0 deleted file mode 100644 index 21078b9bb8..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_0 +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_0.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_0.data.0.json deleted file mode 100644 index 083821afc1..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_0.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.2659863237109041, "min": 2.0, "max": 10.0, "shape": [3, 1], "masked": false, "mean": 6.0} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_1 b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_1 deleted file mode 100644 index c0a3808c71..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_1 +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_1.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_1.data.0.json deleted file mode 100644 index 6eb7f12a11..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_1.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 1.1180339887498949, "min": 4.0, "max": 7.0, "shape": [1, 4], "masked": false, "mean": 5.5} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_nan b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_nan deleted file mode 100644 index 54764eb0a2..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_nan +++ /dev/null @@ 
-1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_nan.data.0.json b/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_nan.data.0.json deleted file mode 100644 index ba31ec5d67..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/linear/single_pt_to_single_pt_nan.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": NaN, "min": NaN, "max": NaN, "shape": [3, 1], "masked": false, "mean": NaN} \ No newline at end of file diff --git a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_bounded.cml b/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_bounded.cml deleted file mode 100644 index 2a354e7397..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_bounded.cml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_bounded_mid_point.cml b/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_bounded_mid_point.cml deleted file mode 100644 index 2a354e7397..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_bounded_mid_point.cml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_latitude.cml b/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_latitude.cml deleted file mode 100644 index 96568028eb..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_latitude.cml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_latitude_longitude.cml b/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_latitude_longitude.cml deleted file mode 100644 index 0bd5fd46d8..0000000000 --- a/lib/iris/tests/results/analysis/interpolation/nearest_neighbour_extract_latitude_longitude.cml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/cube_to_pp/user_rules.txt b/lib/iris/tests/results/cube_to_pp/user_rules.txt deleted file mode 100644 index 9cac1047e0..0000000000 --- a/lib/iris/tests/results/cube_to_pp/user_rules.txt +++ /dev/null @@ -1,67 +0,0 @@ -[PP Field - lbyr: 1998 - lbmon: 12 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbsec: 0 - lbyrd: 1998 - lbmond: 3 - lbdatd: 6 - lbhrd: 3 - lbmind: 0 - lbsecd: 0 - lbtim: 11 - lbft: 6477 - lblrec: 7008 - lbcode: 1 - lbhem: 0 - lbrow: 73 - lbnpt: 96 - lbext: 0 - lbpack: 0 - lbrel: 3 - lbfc: 16 - lbcfc: 0 - lbproc: 0 - lbvc: 8 - lbrvc: 0 - lbexp: 0 - lbegin: 0 - lbnrec: 0 - lbproj: 0 - lbtyp: 0 - lblev: 0 - lbrsvd: (0, 0, 0, 0) - lbsrce: 1111 - lbuser: (1, -99, 0, 9222, 0, 0, 1) - brsvd: (0.0, 0.0, 0.0, 0.0) - bdatum: 0.0 - bacc: 0.0 - blev: 1000.0 - brlev: 0.0 - bhlev: 0.0 - bhrlev: 0.0 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: 92.5 - bdy: -2.5 - bzx: -3.75 - bdx: 3.75 - bmdi: -1e+30 - bmks: 1.0 - data: [[ 254.64399719 254.64399719 254.64399719 ..., 254.64399719 - 254.64399719 254.64399719] - [ 254.97499084 254.96766663 254.9808197 ..., 254.98776245 - 255.03466797 254.99720764] - [ 255.99584961 256.30834961 256.55874634 ..., 256.11685181 - 255.95605469 255.83555603] - ..., - [ 254.57333374 253.86854553 252.90516663 ..., 255.29681396 - 255.38789368 255.14859009] - [ 252.33691406 252.08926392 251.85536194 ..., 252.59863281 - 252.67466736 252.58959961] - [ 248.74584961 248.74584961 248.74584961 ..., 248.74584961 - 248.74584961 248.74584961]] -] \ No newline at end 
of file diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd.cml b/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd.cml deleted file mode 100644 index 05a31d7eda..0000000000 --- a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd.cml +++ /dev/null @@ -1,54 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd.data.0.json b/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd.data.0.json deleted file mode 100644 index 6778a301c3..0000000000 --- a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.4520525932312012, "min": 6.0, "max": 17.0, "shape": [3, 4], "masked": false, "mean": 11.5} \ No newline at end of file diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_2_coords.cml b/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_2_coords.cml deleted file mode 100644 index d35209f137..0000000000 --- a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_2_coords.cml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_2_coords.data.0.json b/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_2_coords.data.0.json deleted file mode 100644 index b85c1c8353..0000000000 --- a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_2_coords.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 6.0, "min": 7.5, "max": 19.5, "shape": [2], "masked": false, "mean": 13.5} \ No newline at end of file diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_with_extrapolation.cml b/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_with_extrapolation.cml deleted file mode 100644 index 41bf5bfa0b..0000000000 --- 
a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_with_extrapolation.cml +++ /dev/null @@ -1,54 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_with_extrapolation.data.0.json b/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_with_extrapolation.data.0.json deleted file mode 100644 index 1c18659523..0000000000 --- a/lib/iris/tests/results/experimental/analysis/interpolate/linear_nd_with_extrapolation.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.4520523548126221, "min": -5.1000003814697266, "max": 5.899998664855957, "shape": [3, 4], "masked": false, "mean": 0.39999952912330627} \ No newline at end of file diff --git a/lib/iris/tests/results/file_load/4d_pp.dot b/lib/iris/tests/results/file_load/4d_pp.dot deleted file mode 100644 index 1e73188817..0000000000 --- a/lib/iris/tests/results/file_load/4d_pp.dot +++ /dev/null @@ -1,121 +0,0 @@ - -digraph CubeGraph{ - - rankdir = "LR" - fontname = "Bitstream Vera Sans" - fontsize = 8 - - node [ - fontname = "Bitstream Vera Sans" - fontsize = 8 - shape = "record" - ] - -# Nodes - ":Cube" [ - label = "Cube|source: Iris test case" - ] - - - subgraph clusterCubeDimensions { - label="Cube data" - - "CubeDimension_0" [ - label = "0|len: 6" - ] - - "CubeDimension_1" [ - label = "1|len: 70" - ] - - "CubeDimension_2" [ - label = "2|len: 100" - ] - - "CubeDimension_3" [ - label = "3|len: 100" - ] - - } - - - subgraph clusterCoords { - label = "Coords" - "Coord_0" [ - label = "AuxCoord|standard_name: altitude\nlong_name: None\nunits: m\npositive: up" - ] - "Coord_1" [ - label = "DimCoord|standard_name: forecast_period\nlong_name: None\nunits: hours\ncircular: False" - ] - "Coord_2" [ - label = "DimCoord|standard_name: grid_latitude\nlong_name: None\nunits: degrees\ncircular: False" - ] - "Coord_3" [ - label = "DimCoord|standard_name: grid_longitude\nlong_name: None\nunits: degrees\ncircular: False" - ] - 
"Coord_4" [ - label = "DimCoord|standard_name: None\nlong_name: level_height\nunits: m\ncircular: False\npositive: up" - ] - "Coord_5" [ - label = "DimCoord|standard_name: model_level_number\nlong_name: None\nunits: 1\ncircular: False\npositive: up" - ] - "Coord_6" [ - label = "AuxCoord|standard_name: None\nlong_name: sigma\nunits: 1" - ] - "Coord_7" [ - label = "AuxCoord|standard_name: surface_altitude\nlong_name: None\nunits: m" - ] - "Coord_8" [ - label = "DimCoord|standard_name: time\nlong_name: None\nunits: hours since 1970-01-01 00:00:00\ncircular: False" - ] - - } - - - subgraph clusterCoordSystems { - label = "CoordSystems" - "CoordSystem_RotatedGeogCS_0" [ - label = "RotatedGeogCS|ellipsoid: GeogCS(6371229.0)\ngrid_north_pole_latitude: 37.5\ngrid_north_pole_longitude: 177.5\nnorth_pole_grid_longitude: 0.0" - ] - - } - - edge [ - arrowhead = "normal" - ] - -# RELATIONSHIPS - -# Containment - - ":Cube" -> "Coord_0" - ":Cube" -> "Coord_1" - "Coord_2" -> "CoordSystem_RotatedGeogCS_0" - ":Cube" -> "Coord_2" - "Coord_3" -> "CoordSystem_RotatedGeogCS_0" - ":Cube" -> "Coord_3" - ":Cube" -> "Coord_4" - ":Cube" -> "Coord_5" - ":Cube" -> "Coord_6" - ":Cube" -> "Coord_7" - ":Cube" -> "Coord_8" - edge [ - style="dashed" - arrowhead = "onormal" - ] - -# Association - - "Coord_0" -> "CubeDimension_1":w - "Coord_0" -> "CubeDimension_2":w - "Coord_0" -> "CubeDimension_3":w - "Coord_2" -> "CubeDimension_2":w - "Coord_3" -> "CubeDimension_3":w - "Coord_4" -> "CubeDimension_1":w - "Coord_5" -> "CubeDimension_1":w - "Coord_6" -> "CubeDimension_1":w - "Coord_7" -> "CubeDimension_2":w - "Coord_7" -> "CubeDimension_3":w - "Coord_8" -> "CubeDimension_0":w -} - \ No newline at end of file diff --git a/lib/iris/tests/results/file_load/coord_attributes.dot b/lib/iris/tests/results/file_load/coord_attributes.dot deleted file mode 100644 index 8fa6b8930c..0000000000 --- a/lib/iris/tests/results/file_load/coord_attributes.dot +++ /dev/null @@ -1,92 +0,0 @@ - -digraph CubeGraph{ - - 
rankdir = "LR" - fontname = "Bitstream Vera Sans" - fontsize = 8 - - node [ - fontname = "Bitstream Vera Sans" - fontsize = 8 - shape = "record" - ] - -# Nodes - ":Cube" [ - label = "Cube|STASH: m01s16i203\nmy_attribute: foobar\nsource: Data from Met Office Unified Model" - ] - - - subgraph clusterCubeDimensions { - label="Cube data" - - "CubeDimension_0" [ - label = "0|len: 73" - ] - - "CubeDimension_1" [ - label = "1|len: 96" - ] - - } - - - subgraph clusterCoords { - label = "Coords" - "Coord_0" [ - label = "DimCoord|standard_name: forecast_period\nlong_name: None\nunits: hours\ncircular: False" - ] - "Coord_1" [ - label = "DimCoord|standard_name: forecast_reference_time\nlong_name: None\nunits: hours since 1970-01-01 00:00:00\ncircular: False" - ] - "Coord_2" [ - label = "DimCoord|standard_name: latitude\nlong_name: None\nunits: degrees\ncircular: False" - ] - "Coord_3" [ - label = "DimCoord|standard_name: longitude\nlong_name: None\nunits: degrees\ncircular: True" - ] - "Coord_4" [ - label = "DimCoord|standard_name: None\nlong_name: pressure\nunits: hPa\ncircular: False" - ] - "Coord_5" [ - label = "DimCoord|standard_name: time\nlong_name: None\nunits: hours since 1970-01-01 00:00:00\ncircular: False\nbrain: hurts\nmonty: python" - ] - - } - - - subgraph clusterCoordSystems { - label = "CoordSystems" - "CoordSystem_GeogCS_0" [ - label = "GeogCS|inverse_flattening: 0.0\nlongitude_of_prime_meridian: 0.0\nsemi_major_axis: 6371229.0\nsemi_minor_axis: 6371229.0" - ] - - } - - edge [ - arrowhead = "normal" - ] - -# RELATIONSHIPS - -# Containment - - ":Cube" -> "Coord_0" - ":Cube" -> "Coord_1" - "Coord_2" -> "CoordSystem_GeogCS_0" - ":Cube" -> "Coord_2" - "Coord_3" -> "CoordSystem_GeogCS_0" - ":Cube" -> "Coord_3" - ":Cube" -> "Coord_4" - ":Cube" -> "Coord_5" - edge [ - style="dashed" - arrowhead = "onormal" - ] - -# Association - - "Coord_2" -> "CubeDimension_0":w - "Coord_3" -> "CubeDimension_1":w -} - \ No newline at end of file diff --git 
a/lib/iris/tests/results/file_load/global_pp.dot b/lib/iris/tests/results/file_load/global_pp.dot deleted file mode 100644 index 9c5a21b379..0000000000 --- a/lib/iris/tests/results/file_load/global_pp.dot +++ /dev/null @@ -1,92 +0,0 @@ - -digraph CubeGraph{ - - rankdir = "LR" - fontname = "Bitstream Vera Sans" - fontsize = 8 - - node [ - fontname = "Bitstream Vera Sans" - fontsize = 8 - shape = "record" - ] - -# Nodes - ":Cube" [ - label = "Cube|STASH: m01s16i203\nmy_attribute: foobar\nsource: Data from Met Office Unified Model" - ] - - - subgraph clusterCubeDimensions { - label="Cube data" - - "CubeDimension_0" [ - label = "0|len: 73" - ] - - "CubeDimension_1" [ - label = "1|len: 96" - ] - - } - - - subgraph clusterCoords { - label = "Coords" - "Coord_0" [ - label = "DimCoord|standard_name: forecast_period\nlong_name: None\nunits: hours\ncircular: False" - ] - "Coord_1" [ - label = "DimCoord|standard_name: forecast_reference_time\nlong_name: None\nunits: hours since 1970-01-01 00:00:00\ncircular: False" - ] - "Coord_2" [ - label = "DimCoord|standard_name: latitude\nlong_name: None\nunits: degrees\ncircular: False" - ] - "Coord_3" [ - label = "DimCoord|standard_name: longitude\nlong_name: None\nunits: degrees\ncircular: True" - ] - "Coord_4" [ - label = "DimCoord|standard_name: None\nlong_name: pressure\nunits: hPa\ncircular: False" - ] - "Coord_5" [ - label = "DimCoord|standard_name: time\nlong_name: None\nunits: hours since 1970-01-01 00:00:00\ncircular: False" - ] - - } - - - subgraph clusterCoordSystems { - label = "CoordSystems" - "CoordSystem_GeogCS_0" [ - label = "GeogCS|inverse_flattening: 0.0\nlongitude_of_prime_meridian: 0.0\nsemi_major_axis: 6371229.0\nsemi_minor_axis: 6371229.0" - ] - - } - - edge [ - arrowhead = "normal" - ] - -# RELATIONSHIPS - -# Containment - - ":Cube" -> "Coord_0" - ":Cube" -> "Coord_1" - "Coord_2" -> "CoordSystem_GeogCS_0" - ":Cube" -> "Coord_2" - "Coord_3" -> "CoordSystem_GeogCS_0" - ":Cube" -> "Coord_3" - ":Cube" -> "Coord_4" - 
":Cube" -> "Coord_5" - edge [ - style="dashed" - arrowhead = "onormal" - ] - -# Association - - "Coord_2" -> "CubeDimension_0":w - "Coord_3" -> "CubeDimension_1":w -} - \ No newline at end of file diff --git a/lib/iris/tests/results/grib_load/y_fastest.cml b/lib/iris/tests/results/grib_load/y_fastest.cml deleted file mode 100644 index c111999c06..0000000000 --- a/lib/iris/tests/results/grib_load/y_fastest.cml +++ /dev/null @@ -1,54 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 540743366d..3a5f0ab17d 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -83,6 +83,9 @@ "https://scitools.github.io/test-iris-imagehash/images/v4/eae942146540b869961f8de694589da69543cc9af1014afbc3fd596b84fe19a7.png", "https://scitools.github.io/test-iris-imagehash/images/v4/eafd9e12a5a061e9925ec716de489e9685078ec981b229e70ddb79219cc3768d.png" ], + "example_tests.test_load_nemo.TestLoadNemo.test_load_nemo.0": [ + "https://scitools.github.io/test-iris-imagehash/images/v4/a3ff34e87f0049496d17c4d9c04fc225d256971392d39f1696df0f16cec00f36.png" + ], "example_tests.test_orca_projection.TestOrcaProjection.test_orca_projection.0": [ "https://scitools.github.io/test-iris-imagehash/images/v4/fb11731a94cea4ee64b35e91d1d2304e9e5ac7397b20e1fe12852487e666ce46.png", "https://scitools.github.io/test-iris-imagehash/images/v4/bb11721a87cce5e4cce79e81d19b3b5e1e1cd3783168e07835853485e65e2e1e.png" diff --git a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl new file mode 100644 index 0000000000..1740926645 --- /dev/null +++ b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl @@ -0,0 +1,56 @@ +dimensions: + bnds = 2 ; + latitude = 3 ; + longitude = 5 ; + time = 4 ; +variables: 
+ byte climatology_test(time, latitude, longitude) ; + climatology_test:long_name = "climatology test" ; + climatology_test:units = "Kelvin" ; + climatology_test:cell_methods = "time: mean over years" ; + double time(time) ; + time:axis = "T" ; + time:climatology = "time_climatology" ; + time:units = "days since 1970-01-01 00:00:00-00" ; + time:standard_name = "time" ; + time:calendar = "gregorian" ; + double time_climatology(time, bnds) ; + double latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "1" ; + latitude:standard_name = "latitude" ; + double longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "1" ; + longitude:standard_name = "longitude" ; + +// global attributes: + :Conventions = "CF-1.7" ; +data: + + climatology_test = + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0 ; + + time = 11332, 11333, 11334, 11335 ; + + time_climatology = + 11332, 14984, + 11333, 14985, + 11334, 14986, + 11335, 14987 ; + + latitude = 0, 30, 60 ; + + longitude = -25, -12.5, 0, 12.5, 25 ; +} diff --git a/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml b/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml index deb9774b3f..9f950b5e1f 100644 --- a/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml +++ b/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml @@ -6,16 +6,16 @@ - - + - - + diff --git a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl b/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl index 7cf343549d..88c5fc18fe 100644 --- a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl @@ -62,5 +62,5 @@ variables: // global attributes: :source = "Iris test case" ; - 
:Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl index 287cbe5358..37dafe4745 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl @@ -64,5 +64,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl index d498520f70..a3c90bf1f8 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl @@ -66,5 +66,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl index 34446083e2..65da679ad0 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl @@ -46,5 +46,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl index 34446083e2..65da679ad0 100644 --- 
a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl @@ -46,5 +46,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl index 43037a36c5..d7a39d72de 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl @@ -46,5 +46,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl index 1d680136d2..5ff22a679b 100644 --- a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl @@ -74,5 +74,5 @@ variables: // global attributes: :source = "Iris test case" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml index a6bf903419..4d37f856ad 100644 --- a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml +++ b/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml @@ -2,7 +2,7 @@ - + @@ -66,7 +66,7 @@ - + diff --git 
a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl b/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl index a7a9538dfd..a22044c2d9 100644 --- a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl @@ -11,5 +11,5 @@ variables: air_pressure:um_version = "4.4" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl index 59c94491bd..be35bfd590 100644 --- a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl @@ -10,5 +10,5 @@ variables: // global attributes: :um_version = "4.3" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl index 671c9d03e3..0399f82349 100644 --- a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl +++ b/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl @@ -7,5 +7,5 @@ variables: // global attributes: :um_version = "4.3" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadPP/simple.cml b/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadPP/simple.cml deleted file mode 100644 index 
4da5286d78..0000000000 --- a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadPP/simple.cml +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadPP/simple_callback.cml b/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadPP/simple_callback.cml deleted file mode 100644 index 1614dad71a..0000000000 --- a/lib/iris/tests/results/integration/um/fieldsfile/TestStructuredLoadPP/simple_callback.cml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/name/NAMEII_timeseries.data.1.json b/lib/iris/tests/results/name/NAMEII_timeseries.data.1.json index 936e74deb3..053375d220 100644 --- a/lib/iris/tests/results/name/NAMEII_timeseries.data.1.json +++ b/lib/iris/tests/results/name/NAMEII_timeseries.data.1.json @@ -1 +1 @@ -{"std": 0.00017462574781585265, "min": 0.0, "max": 0.00099772017000000009, "shape": [132], "masked": false, "mean": 9.5217456948642194e-05} \ No newline at end of file +{"std": 0.00017462574781585265, "min": 0.0, "max": 0.00099772017, "shape": [132], "masked": false, "mean": 9.52174569486422e-05} \ No newline at end of file diff --git a/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml index 0a28f4fb7e..39cb8f2950 100644 --- a/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_auxiliary_coord_netcdf3.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/int64_data_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_data_netcdf3.cml index 608769f4c3..55d7260f12 100644 --- a/lib/iris/tests/results/netcdf/int64_data_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_data_netcdf3.cml @@ -2,7 +2,7 @@ - + diff --git 
a/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml index 5c523ac16b..1c59fc947e 100644 --- a/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/int64_dimension_coord_netcdf3.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/multi_dim_coord_slightly_different.cdl b/lib/iris/tests/results/netcdf/multi_dim_coord_slightly_different.cdl index 42592d9c3e..b771a13e00 100644 --- a/lib/iris/tests/results/netcdf/multi_dim_coord_slightly_different.cdl +++ b/lib/iris/tests/results/netcdf/multi_dim_coord_slightly_different.cdl @@ -25,5 +25,5 @@ variables: temp3_0:units = "K" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_conf_aux.cdl b/lib/iris/tests/results/netcdf/netcdf_save_conf_aux.cdl index c331ce9b84..f7ee6603b3 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_conf_aux.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_conf_aux.cdl @@ -17,5 +17,5 @@ variables: time_0:standard_name = "time" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_conf_name.cdl b/lib/iris/tests/results/netcdf/netcdf_save_conf_name.cdl index 2104322bea..949fe40926 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_conf_name.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_conf_name.cdl @@ -17,5 +17,5 @@ variables: time_0:standard_name = "time" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_confl_attr.cdl b/lib/iris/tests/results/netcdf/netcdf_save_confl_attr.cdl index deba7f5ef7..25b02b0f17 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_confl_attr.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_confl_attr.cdl @@ -14,5 +14,5 @@ variables: temp2:foo = "orange" ; // 
global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_confl_global_attr.cdl b/lib/iris/tests/results/netcdf/netcdf_save_confl_global_attr.cdl index e2d41987d9..1ec4c4b045 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_confl_global_attr.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_confl_global_attr.cdl @@ -14,5 +14,5 @@ variables: temp2:history = "Team B won." ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_gridmapmulti.cdl b/lib/iris/tests/results/netcdf/netcdf_save_gridmapmulti.cdl index ff74fcdd2c..c998f129d3 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_gridmapmulti.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_gridmapmulti.cdl @@ -60,5 +60,5 @@ variables: grid_latitude:long_name = "3" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl b/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl index f6dcee0e22..1863d1ee7d 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl @@ -71,5 +71,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; :um_version = "7.4" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml index a89756a77e..8e4a005d44 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml index e4f880f4c8..13582b3106 100644 --- 
a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_save_multi_0.cdl b/lib/iris/tests/results/netcdf/netcdf_save_multi_0.cdl index 27cf62a962..f4f8d6e88a 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_multi_0.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_multi_0.cdl @@ -46,5 +46,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_multi_1.cdl b/lib/iris/tests/results/netcdf/netcdf_save_multi_1.cdl index 7377f94592..50222b796e 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_multi_1.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_multi_1.cdl @@ -46,5 +46,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_multi_2.cdl b/lib/iris/tests/results/netcdf/netcdf_save_multi_2.cdl index 553566355c..7761a3c45d 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_multi_2.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_multi_2.cdl @@ -42,5 +42,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_multiple.cdl b/lib/iris/tests/results/netcdf/netcdf_save_multiple.cdl index 141c1ba5db..7ad1818bb6 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_multiple.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_multiple.cdl @@ -60,5 +60,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl 
b/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl index 27e4b13e00..32d4163d01 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl @@ -49,5 +49,5 @@ variables: :history = "Thu Nov 29 10:45:50 2012: /project/ukmo/rhel6/nco/bin/ncks -d time,0,3 new_rotPole_precipitation.nc small_rotPole_precipitation.nc" ; :institution = "DMI" ; :source = "HIRHAM" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_no_global_attr.cdl b/lib/iris/tests/results/netcdf/netcdf_save_no_global_attr.cdl index b10abf5a4a..d66b865c30 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_no_global_attr.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_no_global_attr.cdl @@ -44,5 +44,5 @@ variables: temp3_0:h = "v" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl b/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl index dd8ee12e6a..e67316b2f7 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_no_name.cdl @@ -13,5 +13,5 @@ variables: unknown_scalar:units = "no_unit" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_nocoord.cdl b/lib/iris/tests/results/netcdf/netcdf_save_nocoord.cdl index 05ad948c52..fbec1e301f 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_nocoord.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_nocoord.cdl @@ -15,5 +15,5 @@ variables: temp3:units = "K" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl index a5be3b454c..0e3ae7e715 100644 --- 
a/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl @@ -54,5 +54,5 @@ variables: // global attributes: :source = "Iris test case" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl index f30eb67952..601ea11719 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl @@ -62,5 +62,5 @@ variables: // global attributes: :source = "Iris test case" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl index a061484318..b86a77aa62 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl @@ -59,5 +59,5 @@ variables: // global attributes: :source = "Iris test case" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_samedimcoord.cdl b/lib/iris/tests/results/netcdf/netcdf_save_samedimcoord.cdl index 2b40c45b3a..56bea53d5f 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_samedimcoord.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_samedimcoord.cdl @@ -19,5 +19,5 @@ variables: temp3:units = "K" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_samevar.cdl b/lib/iris/tests/results/netcdf/netcdf_save_samevar.cdl index e7b2115ca8..7e8225825c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_samevar.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_samevar.cdl @@ -12,5 +12,5 @@ variables: temp_0:units = "K" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions 
= "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_single.cdl b/lib/iris/tests/results/netcdf/netcdf_save_single.cdl index 5edaf8bfde..e45496521c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_single.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_single.cdl @@ -44,5 +44,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_save_wcoord.cdl b/lib/iris/tests/results/netcdf/netcdf_save_wcoord.cdl index d0f8844199..b93af15f2b 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_wcoord.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_wcoord.cdl @@ -19,5 +19,5 @@ variables: dim0:long_name = "Rnd Coordinate" ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml index 2776b2018c..2d909ba57e 100644 --- a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml +++ b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml @@ -62,7 +62,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/save_load_traj.cml b/lib/iris/tests/results/netcdf/save_load_traj.cml index 44cbc28513..7f8b3d7e99 100644 --- a/lib/iris/tests/results/netcdf/save_load_traj.cml +++ b/lib/iris/tests/results/netcdf/save_load_traj.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml index 0a28f4fb7e..39cb8f2950 100644 --- a/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_auxiliary_coord_netcdf3.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/uint32_data_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_data_netcdf3.cml index 608769f4c3..55d7260f12 100644 --- 
a/lib/iris/tests/results/netcdf/uint32_data_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_data_netcdf3.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml b/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml index 5c523ac16b..1c59fc947e 100644 --- a/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml +++ b/lib/iris/tests/results/netcdf/uint32_dimension_coord_netcdf3.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/regrid/airpress_on_theta_0d.data.0.json b/lib/iris/tests/results/regrid/airpress_on_theta_0d.data.0.json deleted file mode 100644 index bb55e1e464..0000000000 --- a/lib/iris/tests/results/regrid/airpress_on_theta_0d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 0.0, "min": 102192.140625, "max": 102192.140625, "shape": [], "masked": false, "mean": 102192.140625} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/airpress_on_theta_1d.data.0.json b/lib/iris/tests/results/regrid/airpress_on_theta_1d.data.0.json deleted file mode 100644 index 40af5a5f32..0000000000 --- a/lib/iris/tests/results/regrid/airpress_on_theta_1d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 185.38343811035156, "min": 99609.65625, "max": 100038.796875, "shape": [3], "masked": false, "mean": 99781.375} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/airpress_on_theta_2d.data.0.json b/lib/iris/tests/results/regrid/airpress_on_theta_2d.data.0.json deleted file mode 100644 index 5b4f8a1724..0000000000 --- a/lib/iris/tests/results/regrid/airpress_on_theta_2d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 1048.704345703125, "min": 98272.953125, "max": 101113.84375, "shape": [2, 3], "masked": false, "mean": 99580.7421875} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/airpress_on_theta_3d.data.0.json b/lib/iris/tests/results/regrid/airpress_on_theta_3d.data.0.json deleted file mode 100644 index 59f6885bf3..0000000000 --- 
a/lib/iris/tests/results/regrid/airpress_on_theta_3d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 1126.344970703125, "min": 97080.71875, "max": 101113.84375, "shape": [5, 2, 3], "masked": false, "mean": 99043.671875} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/bilinear_larger.data.0.json b/lib/iris/tests/results/regrid/bilinear_larger.data.0.json deleted file mode 100644 index ed0557ad4f..0000000000 --- a/lib/iris/tests/results/regrid/bilinear_larger.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 4.6904158592224121, "min": -2.5, "max": 13.5, "shape": [4, 5], "masked": false, "mean": 5.5} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/bilinear_larger_lon_extrapolate_left.data.0.json b/lib/iris/tests/results/regrid/bilinear_larger_lon_extrapolate_left.data.0.json deleted file mode 100644 index ec738b52bd..0000000000 --- a/lib/iris/tests/results/regrid/bilinear_larger_lon_extrapolate_left.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 4.6518816947937012, "min": -2.5, "max": 13.0, "shape": [4, 5], "masked": false, "mean": 5.0999999046325684} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/bilinear_larger_lon_extrapolate_right.data.0.json b/lib/iris/tests/results/regrid/bilinear_larger_lon_extrapolate_right.data.0.json deleted file mode 100644 index ca948c42df..0000000000 --- a/lib/iris/tests/results/regrid/bilinear_larger_lon_extrapolate_right.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 4.6518816947937012, "min": -2.0, "max": 13.5, "shape": [4, 5], "masked": false, "mean": 5.9000000953674316} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/bilinear_smaller.data.0.json b/lib/iris/tests/results/regrid/bilinear_smaller.data.0.json deleted file mode 100644 index e3adacb147..0000000000 --- a/lib/iris/tests/results/regrid/bilinear_smaller.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 2.1602468490600586, "min": 2.5, "max": 8.5, "shape": [2, 3], "masked": false, 
"mean": 5.5} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/bilinear_smaller_lon_align_left.data.0.json b/lib/iris/tests/results/regrid/bilinear_smaller_lon_align_left.data.0.json deleted file mode 100644 index b60594f0a5..0000000000 --- a/lib/iris/tests/results/regrid/bilinear_smaller_lon_align_left.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 2.1602468490600586, "min": 2.0, "max": 8.0, "shape": [2, 3], "masked": false, "mean": 5.0} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/bilinear_smaller_lon_align_right.data.0.json b/lib/iris/tests/results/regrid/bilinear_smaller_lon_align_right.data.0.json deleted file mode 100644 index 74786e568a..0000000000 --- a/lib/iris/tests/results/regrid/bilinear_smaller_lon_align_right.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 2.1602468490600586, "min": 3.0, "max": 9.0, "shape": [2, 3], "masked": false, "mean": 6.0} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/low_med_high.cml b/lib/iris/tests/results/regrid/low_med_high.cml deleted file mode 100644 index fb2c5bd3cf..0000000000 --- a/lib/iris/tests/results/regrid/low_med_high.cml +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/regrid/low_med_high.data.0.json b/lib/iris/tests/results/regrid/low_med_high.data.0.json deleted file mode 100644 index d64741ce40..0000000000 --- a/lib/iris/tests/results/regrid/low_med_high.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 3.7925365302569376, "min": 0.0, "max": 11.0, "shape": [5, 6], "masked": false, "mean": 5.5} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/low_med_high.data.1.json b/lib/iris/tests/results/regrid/low_med_high.data.1.json deleted file mode 100644 index fb58052aab..0000000000 --- a/lib/iris/tests/results/regrid/low_med_high.data.1.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 6.0184900284225957, "min": 
0.0, "max": 19.0, "shape": [5, 6], "masked": false, "mean": 8.3333333333333339} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/low_med_high.data.2.json b/lib/iris/tests/results/regrid/low_med_high.data.2.json deleted file mode 100644 index ac1c1a2b8c..0000000000 --- a/lib/iris/tests/results/regrid/low_med_high.data.2.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 8.6554414483991895, "min": 0.0, "max": 29.0, "shape": [5, 6], "masked": false, "mean": 14.5} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/theta_on_airpress_0d.data.0.json b/lib/iris/tests/results/regrid/theta_on_airpress_0d.data.0.json deleted file mode 100644 index 292ae75826..0000000000 --- a/lib/iris/tests/results/regrid/theta_on_airpress_0d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 0.0, "min": 282.12796020507812, "max": 282.12796020507812, "shape": [], "masked": false, "mean": 282.12796020507812} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/theta_on_airpress_1d.data.0.json b/lib/iris/tests/results/regrid/theta_on_airpress_1d.data.0.json deleted file mode 100644 index 0ad78fb933..0000000000 --- a/lib/iris/tests/results/regrid/theta_on_airpress_1d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 0.20830129086971283, "min": 282.77932739257812, "max": 283.23013305664062, "shape": [4], "masked": false, "mean": 283.0235595703125} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/theta_on_airpress_2d.data.0.json b/lib/iris/tests/results/regrid/theta_on_airpress_2d.data.0.json deleted file mode 100644 index 1e19a7c045..0000000000 --- a/lib/iris/tests/results/regrid/theta_on_airpress_2d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 0.89102786779403687, "min": 281.37478637695312, "max": 284.1317138671875, "shape": [4, 4], "masked": false, "mean": 282.7205810546875} \ No newline at end of file diff --git a/lib/iris/tests/results/regrid/theta_on_airpress_3d.data.0.json 
b/lib/iris/tests/results/regrid/theta_on_airpress_3d.data.0.json deleted file mode 100644 index 62fbe8491a..0000000000 --- a/lib/iris/tests/results/regrid/theta_on_airpress_3d.data.0.json +++ /dev/null @@ -1 +0,0 @@ -{"std": 1.0716584920883179, "min": 281.37478637695312, "max": 285.50057983398438, "shape": [5, 4, 4], "masked": false, "mean": 283.49774169921875} \ No newline at end of file diff --git a/lib/iris/tests/results/system/supported_filetype_.nc.cml b/lib/iris/tests/results/system/supported_filetype_.nc.cml index c752ff3464..595cd287ae 100644 --- a/lib/iris/tests/results/system/supported_filetype_.nc.cml +++ b/lib/iris/tests/results/system/supported_filetype_.nc.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/trajectory/big_cube.cml b/lib/iris/tests/results/trajectory/big_cube.cml deleted file mode 100644 index e8db4005ec..0000000000 --- a/lib/iris/tests/results/trajectory/big_cube.cml +++ /dev/null @@ -1,99 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/unit/fileformats/grib/load_cubes/load_cubes/reduced_raw.cml b/lib/iris/tests/results/unit/fileformats/grib/load_cubes/load_cubes/reduced_raw.cml deleted file mode 100644 index b4f279914c..0000000000 --- a/lib/iris/tests/results/unit/fileformats/grib/load_cubes/load_cubes/reduced_raw.cml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl new file mode 100644 index 0000000000..3646627746 --- /dev/null +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl @@ -0,0 +1,26 @@ +dimensions: + bnds = 2 ; + latitude = 3 ; + longitude = 5 ; + time = 4 ; +variables: + byte climatology_test(time, latitude, longitude) ; + 
climatology_test:long_name = "climatology test" ; + climatology_test:units = "Kelvin" ; + climatology_test:cell_methods = "time: mean over years" ; + double time(time) ; + time:axis = "T" ; + time:climatology = "time_climatology" ; + time:units = "days since 1970-01-01 00:00:00-00" ; + time:standard_name = "time" ; + time:calendar = "gregorian" ; + double time_climatology(time, bnds) ; + double latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "1" ; + latitude:standard_name = "latitude" ; + double longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "1" ; + longitude:standard_name = "longitude" ; +} diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml index e251bc4e5b..1f9dfb0a14 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml index 86a3f9b50d..06c192f8a4 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml index 
8b77cb62a1..9b654f6c6e 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml index fe90919bb0..3a55c44f2f 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml index d3f6e1cf4a..1ab309af4e 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml index 390d94cbc8..3ea688d1fa 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cml @@ -2,7 +2,7 @@ - + diff --git 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml index d5e55c4516..829c7ce38e 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml index 9948c8e433..5a7a6441a4 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/12187.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml index 885944c4f0..cf7b207be9 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml index e956bfcb70..51ab62f9aa 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_level_lat_orig.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml index 5fa7a949b2..55a60a7cd6 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_press_orig.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml index ddb11831fc..2736fe9aa6 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_lon_lat_several.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml index 9c341b4898..8c4ee7df19 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_n10r13xy.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml index 98bbd4e2f2..83f7502ba5 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_time_press.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_tseries.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_tseries.b_0.cml index 958a241fd4..fb6fa8a599 100644 --- 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_tseries.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/aaxzc_tseries.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_0.cml index 1c9c58d6d0..f2c30b37ef 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_1.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_1.cml index 7228163b23..cc5f574799 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_1.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_1.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_2.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_2.cml index cb5c754080..9fe3e1cb1c 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_2.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abcza_pa19591997_daily_29.b_2.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abxpa_press_lat.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abxpa_press_lat.b_0.cml index bd4411c1b3..71c005b916 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abxpa_press_lat.b_0.cml +++ 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/abxpa_press_lat.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/integer.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/integer.b_0.cml index 53bcfc067d..642dadc721 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/integer.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/integer.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/model.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/model.b_0.cml index 186c1f6f48..f0bb9dc293 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/model.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/model.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/ocean_xsect.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/ocean_xsect.b_0.cml index 8f01a6c707..5549d7cebe 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/ocean_xsect.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/ocean_xsect.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml index c67d6f4ce3..4f84609832 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc699.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml index 5dc6fb5c9c..caafa5845c 100644 --- 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st0fc942.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st30211.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st30211.b_0.cml index 1c803423b3..ffcf430c02 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st30211.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/st30211.b_0.cml @@ -2,7 +2,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl index a523f32b78..429da0807b 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl @@ -45,5 +45,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl index a523f32b78..429da0807b 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl @@ -45,5 +45,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl index ab34087917..f1c94dc834 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl @@ -45,5 +45,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cdl index 531f8f2e7d..e8f3f04d7d 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.16.202.000128.1860.09.01.00.00.b_0.cdl @@ -46,5 +46,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cdl index 862a8c66ae..cb3a3bc2eb 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/001000000000.00.000.000000.1860.01.01.00.00.f.b_0.cdl @@ -38,5 +38,5 @@ variables: double time_bnds(bnds) ; // global attributes: - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" 
; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cdl index 9a1d5b8d8a..40ea329140 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/002000000000.44.101.131200.1920.09.01.00.00.b_0.cdl @@ -40,5 +40,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cdl index 299adc662e..692c01c76f 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/008000000000.44.101.000128.1890.09.01.00.00.b_0.cdl @@ -19,5 +19,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/12187.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/12187.b_0.cdl index 2044e2bc06..20607d69ba 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/12187.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/12187.b_0.cdl @@ -59,5 +59,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; :um_version = "6.1" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cdl index 43530865bd..8a9498abce 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/HadCM2_ts_SAT_ann_18602100.b_0.cdl @@ -39,5 +39,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_level_lat_orig.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_level_lat_orig.b_0.cdl index dfbf6839b0..f9450c58ff 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_level_lat_orig.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_level_lat_orig.b_0.cdl @@ -43,5 +43,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_press_orig.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_press_orig.b_0.cdl index ffff50f425..8d513798fd 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_press_orig.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_press_orig.b_0.cdl @@ -48,5 +48,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_several.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_several.b_0.cdl index 
18ab8d865e..de372487af 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_several.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_lon_lat_several.b_0.cdl @@ -47,5 +47,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_n10r13xy.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_n10r13xy.b_0.cdl index 4ca612826d..105f25201f 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_n10r13xy.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_n10r13xy.b_0.cdl @@ -45,5 +45,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_time_press.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_time_press.b_0.cdl index 0a19ff646a..9cd7a53739 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_time_press.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_time_press.b_0.cdl @@ -20,5 +20,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_tseries.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_tseries.b_0.cdl index 1e72aac281..b864df3ad8 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_tseries.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/aaxzc_tseries.b_0.cdl @@ -24,5 +24,5 @@ variables: // global attributes: :source = "Data from Met Office 
Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_0.cdl index 00659bb8ad..d3beb8f273 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_0.cdl @@ -47,5 +47,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_1.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_1.cdl index 8fdedb8d3c..5e6e974110 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_1.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_1.cdl @@ -47,5 +47,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_2.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_2.cdl index 85c9b348cf..01ca9c1493 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_2.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abcza_pa19591997_daily_29.b_2.cdl @@ -43,5 +43,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abxpa_press_lat.b_0.cdl 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abxpa_press_lat.b_0.cdl index fea73514b1..6f6cf00d82 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abxpa_press_lat.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/abxpa_press_lat.b_0.cdl @@ -41,5 +41,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/integer.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/integer.b_0.cdl index f5fdbbd0e2..443b44e9bf 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/integer.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/integer.b_0.cdl @@ -40,5 +40,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/model.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/model.b_0.cdl index 9beb597e69..bf505b032a 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/model.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/model.b_0.cdl @@ -48,5 +48,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/ocean_xsect.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/ocean_xsect.b_0.cdl index 1af330948d..ff46155154 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/ocean_xsect.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/ocean_xsect.b_0.cdl @@ -44,5 +44,5 @@ variables: // global attributes: :source = "Data from Met Office 
Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc699.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc699.b_0.cdl index 2667dc8766..48f6f0c835 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc699.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc699.b_0.cdl @@ -39,5 +39,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc942.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc942.b_0.cdl index bb1c0d212e..4514f23858 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc942.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st0fc942.b_0.cdl @@ -49,5 +49,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st30211.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st30211.b_0.cdl index d722a55c33..cefed0f94c 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st30211.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/st30211.b_0.cdl @@ -47,5 +47,5 @@ variables: // global attributes: :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.5" ; + :Conventions = "CF-1.7" ; } diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_pp/001000000000.00.000.000000.1860.01.01.00.00.f.b.pp.txt b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_pp/001000000000.00.000.000000.1860.01.01.00.00.f.b.pp.txt deleted file mode 100644 index 8368dc5763..0000000000 --- 
a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_pp/001000000000.00.000.000000.1860.01.01.00.00.f.b.pp.txt +++ /dev/null @@ -1,118 +0,0 @@ -[PP Field - lbyr: 1860 - lbmon: 1 - lbdat: 1 - lbhr: 0 - lbmin: 0 - lbday: 1 - lbyrd: 1870 - lbmond: 1 - lbdatd: 1 - lbhrd: 0 - lbmind: 0 - lbdayd: 1 - lbtim: 22 - lbft: 0 - lblrec: 12256 - lbcode: 1 - lbhem: 3 - lbrow: 94 - lbnpt: 128 - lbext: 224 - lbpack: 0 - lbrel: 2 - lbfc: 608 - lbcfc: 0 - lbproc: 0 - lbvc: 0 - lbrvc: 0 - lbexp: 0 - lbegin: 0 - lbnrec: 0 - lbproj: 0 - lbtyp: 0 - lblev: 0 - lbrsvd: (0, 0, 0, 0) - lbsrce: 0 - lbuser: (1, 0, 0, 0, 0, 0, 0) - brsvd: (0.0, 0.0, 0.0, 0.0) - bdatum: 0.0 - bacc: 0.0 - blev: 0.0 - brlev: 0.0 - bhlev: 0.0 - bhrlev: 0.0 - bplat: 90.0 - bplon: 0.0 - bgor: 0.0 - bzy: 0.0 - bdy: 0.0 - bzx: 0.0 - bdx: 0.0 - bmdi: -1e+30 - bmks: 1.0 - data: [[ -1.00000002e+30 -1.00000002e+30 -1.00000002e+30 ..., -1.00000002e+30 - -1.00000002e+30 -1.00000002e+30] - [ -1.00000002e+30 -1.00000002e+30 -1.00000002e+30 ..., -1.00000002e+30 - -1.00000002e+30 -1.00000002e+30] - [ -1.00000002e+30 -1.00000002e+30 -1.00000002e+30 ..., -1.00000002e+30 - -1.00000002e+30 -1.00000002e+30] - ..., - [ -1.05353355e+00 -1.08272266e+00 -1.11123657e+00 ..., -9.79792595e-01 - -1.00221825e+00 -1.02663898e+00] - [ -1.01000309e+00 -1.02563190e+00 -1.04178810e+00 ..., -9.66852188e-01 - -9.79707718e-01 -9.94375229e-01] - [ -9.67420578e-01 -9.70564842e-01 -9.72410202e-01 ..., -9.55687523e-01 - -9.60104942e-01 -9.64031219e-01]] - x: [ 7.15255737e-07 2.81250215e+00 5.62500334e+00 8.43750477e+00 - 1.12500057e+01 1.40625076e+01 1.68750076e+01 1.96875095e+01 - 2.25000114e+01 2.53125114e+01 2.81250114e+01 3.09375153e+01 - 3.37500153e+01 3.65625191e+01 3.93750191e+01 4.21875229e+01 - 4.50000000e+01 4.78125000e+01 5.06250000e+01 5.34374847e+01 - 5.62499847e+01 5.90624847e+01 6.18749886e+01 6.46874924e+01 - 6.74999847e+01 7.03124847e+01 7.31250000e+01 7.59374924e+01 - 7.87499847e+01 8.15624924e+01 8.43749924e+01 8.71875000e+01 - 
9.00000000e+01 9.28125000e+01 9.56250000e+01 9.84375076e+01 - 1.01250008e+02 1.04062508e+02 1.06875008e+02 1.09687500e+02 - 1.12500000e+02 1.15312508e+02 1.18125015e+02 1.20937515e+02 - 1.23750015e+02 1.26562515e+02 1.29375031e+02 1.32187500e+02 - 1.34999985e+02 1.37812469e+02 1.40624969e+02 1.43437485e+02 - 1.46249985e+02 1.49062500e+02 1.51875000e+02 1.54687469e+02 - 1.57499969e+02 1.60312485e+02 1.63124985e+02 1.65937485e+02 - 1.68749985e+02 1.71562500e+02 1.74375000e+02 1.77187500e+02 - 1.80000000e+02 1.82812500e+02 1.85625000e+02 1.88437500e+02 - 1.91250000e+02 1.94062515e+02 1.96875015e+02 1.99687515e+02 - 2.02500015e+02 2.05312531e+02 2.08125031e+02 2.10937500e+02 - 2.13750000e+02 2.16562515e+02 2.19375000e+02 2.22187500e+02 - 2.24999969e+02 2.27812485e+02 2.30625000e+02 2.33437485e+02 - 2.36249985e+02 2.39062500e+02 2.41874985e+02 2.44687469e+02 - 2.47499969e+02 2.50312485e+02 2.53124985e+02 2.55937469e+02 - 2.58750000e+02 2.61562500e+02 2.64375000e+02 2.67187500e+02 - 2.70000000e+02 2.72812500e+02 2.75625000e+02 2.78437500e+02 - 2.81250000e+02 2.84062500e+02 2.86875000e+02 2.89687500e+02 - 2.92500000e+02 2.95312531e+02 2.98125000e+02 3.00937500e+02 - 3.03750000e+02 3.06562531e+02 3.09375031e+02 3.12187531e+02 - 3.15000031e+02 3.17812500e+02 3.20624969e+02 3.23437469e+02 - 3.26249969e+02 3.29062469e+02 3.31874969e+02 3.34687500e+02 - 3.37500000e+02 3.40312500e+02 3.43124969e+02 3.45937500e+02 - 3.48750000e+02 3.51562500e+02 3.54375000e+02 3.57187500e+02] - y: [-82.31327057 -79.5252533 -76.73671722 -73.94741821 -71.15768433 - -68.3677063 -65.57757568 -62.78733063 -59.99700928 -57.20662689 - -54.4161911 -51.62572479 -48.8352356 -46.04471588 -43.25418854 - -40.46364594 -37.67308807 -34.92211151 -32.28807831 -29.80595589 - -27.47015381 -25.27505112 -23.21505356 -21.2845726 -19.47800446 - -17.78974915 -16.2142086 -14.74578381 -13.37887764 -12.10788345 - -10.92720413 -9.83125305 -8.81441879 -7.87110519 -6.99571419 - -6.18265152 -5.42630577 -4.72108269 -4.06140375 
-3.44164515 - -2.85620499 -2.29949784 -1.76592588 -1.24988604 -0.74578041 - -0.24800591 0.24800141 0.74577141 1.24988604 1.76592577 - 2.29949331 2.85620499 3.44164062 4.06140327 4.72109604 - 5.42631292 6.18265581 6.99571896 7.87110949 8.8144207 - 9.83125305 10.92721558 12.10788536 13.37887001 14.74577522 - 16.21421051 17.7897625 19.47801971 21.28458977 23.21506882 - 25.27505493 27.47015381 29.80596733 32.28808594 34.9221077 - 37.67308044 40.46363831 43.25417709 46.04471207 48.83522797 - 51.62572479 54.4161911 57.20662308 59.99700928 62.78732681 - 65.57757568 68.36772156 71.15769958 73.94741821 76.73672485 - 79.5252533 82.31199646 85.09243774 87.86787415] -] \ No newline at end of file diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index 5965d5a208..44a1c2c0f1 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -23,6 +23,7 @@ from six.moves import (filter, input, map, range, zip) # noqa import six +from datetime import datetime import os.path import numpy as np @@ -33,12 +34,13 @@ import iris.aux_factory import iris.coords import iris.coords as icoords -from iris.coords import DimCoord, AuxCoord +from iris.coords import DimCoord, AuxCoord, CellMethod import iris.tests as tests from iris.coord_systems import GeogCS, RotatedGeogCS from ._stock_2d_latlons import (sample_2d_latlons, make_bounds_discontiguous_at_point) + def lat_lon_cube(): """ Returns a cube with a latitude and longitude suitable for testing @@ -48,14 +50,14 @@ def lat_lon_cube(): cube = Cube(np.arange(12, dtype=np.int32).reshape((3, 4))) cs = GeogCS(6371229) coord = DimCoord(points=np.array([-1, 0, 1], dtype=np.int32), - standard_name='latitude', - units='degrees', - coord_system=cs) + standard_name='latitude', + units='degrees', + coord_system=cs) cube.add_dim_coord(coord, 0) coord = DimCoord(points=np.array([-1, 0, 1, 2], dtype=np.int32), - standard_name='longitude', - units='degrees', - coord_system=cs) + standard_name='longitude', + units='degrees', + coord_system=cs) cube.add_dim_coord(coord, 1) return cube @@ -68,16 +70,19 @@ def global_pp(): broken STASH encoding in that file. 
""" + def callback_global_pp(cube, field, filename): cube.standard_name = 'air_temperature' cube.units = 'K' + path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp')) cube = iris.load_cube(path, callback=callback_global_pp) return cube def simple_pp(): - filename = tests.get_data_path(['PP', 'simple_pp', 'global.pp']) # Differs from global_pp() + # Differs from global_pp() + filename = tests.get_data_path(['PP', 'simple_pp', 'global.pp']) cube = iris.load_cube(filename) return cube @@ -99,7 +104,8 @@ def simple_1d(with_bounds=True): cube.long_name = 'thingness' cube.units = '1' points = np.arange(11, dtype=np.int32) + 1 - bounds = np.column_stack([np.arange(11, dtype=np.int32), np.arange(11, dtype=np.int32) + 1]) + bounds = np.column_stack( + [np.arange(11, dtype=np.int32), np.arange(11, dtype=np.int32) + 1]) coord = DimCoord(points, long_name='foo', units='1', bounds=bounds) cube.add_dim_coord(coord, 0) return cube @@ -125,11 +131,11 @@ def simple_2d(with_bounds=True): cube = Cube(np.arange(12, dtype=np.int32).reshape((3, 4))) cube.long_name = 'thingness' cube.units = '1' - y_points = np.array([2.5, 7.5, 12.5]) + y_points = np.array([2.5, 7.5, 12.5]) y_bounds = np.array([[0, 5], [5, 10], [10, 15]], dtype=np.int32) y_coord = DimCoord(y_points, long_name='bar', units='1', bounds=y_bounds if with_bounds else None) - x_points = np.array([ -7.5, 7.5, 22.5, 37.5]) + x_points = np.array([-7.5, 7.5, 22.5, 37.5]) x_bounds = np.array([[-15, 0], [0, 15], [15, 30], [30, 45]], dtype=np.int32) x_coord = DimCoord(x_points, long_name='foo', units='1', @@ -202,7 +208,7 @@ def simple_3d_w_multidim_coords(with_bounds=True): [2.5, 14., 36.5, 44.]]) x_bounds = np.array([[[-15, 0], [0, 15], [15, 30], [30, 45]], [[-25, 0], [0, 8], [8, 45], [45, 50]], - [[-5, 10], [10, 18], [18, 55], [18, 70]]], + [[-5, 10], [10, 18], [18, 55], [18, 70]]], dtype=np.int32) x_coord = AuxCoord(points=x_points, long_name='foo', units='1', bounds=x_bounds if with_bounds else None) @@ -240,13 +246,13 @@ 
def simple_3d(): cube.long_name = 'thingness' cube.units = '1' wibble_coord = DimCoord(np.array([10., 30.], - dtype=np.float32), - long_name='wibble', units='1') + dtype=np.float32), + long_name='wibble', units='1') lon = DimCoord([-180, -90, 0, 90], - standard_name='longitude', - units='degrees', circular=True) + standard_name='longitude', + units='degrees', circular=True) lat = DimCoord([90, 0, -90], - standard_name='latitude', units='degrees') + standard_name='latitude', units='degrees') cube.add_dim_coord(wibble_coord, [0]) cube.add_dim_coord(lat, [1]) cube.add_dim_coord(lon, [2]) @@ -389,33 +395,35 @@ def hybrid_height(): def simple_4d_with_hybrid_height(): - cube = iris.cube.Cube(np.arange(3*4*5*6, dtype='i8').reshape(3, 4, 5, 6), - "air_temperature", units="K") + cube = iris.cube.Cube( + np.arange(3 * 4 * 5 * 6, dtype='i8').reshape(3, 4, 5, 6), + "air_temperature", units="K") cube.add_dim_coord(DimCoord(np.arange(3, dtype='i8'), "time", units="hours since epoch"), 0) - cube.add_dim_coord(DimCoord(np.arange(4, dtype='i8')+10, + cube.add_dim_coord(DimCoord(np.arange(4, dtype='i8') + 10, "model_level_number", units="1"), 1) - cube.add_dim_coord(DimCoord(np.arange(5, dtype='i8')+20, + cube.add_dim_coord(DimCoord(np.arange(5, dtype='i8') + 20, "grid_latitude", units="degrees"), 2) - cube.add_dim_coord(DimCoord(np.arange(6, dtype='i8')+30, + cube.add_dim_coord(DimCoord(np.arange(6, dtype='i8') + 30, "grid_longitude", units="degrees"), 3) - cube.add_aux_coord(AuxCoord(np.arange(4, dtype='i8')+40, + cube.add_aux_coord(AuxCoord(np.arange(4, dtype='i8') + 40, long_name="level_height", units="m"), 1) - cube.add_aux_coord(AuxCoord(np.arange(4, dtype='i8')+50, + cube.add_aux_coord(AuxCoord(np.arange(4, dtype='i8') + 50, long_name="sigma", units="1"), 1) - cube.add_aux_coord(AuxCoord(np.arange(5*6, dtype='i8').reshape(5, 6)+100, - long_name="surface_altitude", - units="m"), [2, 3]) + cube.add_aux_coord( + AuxCoord(np.arange(5 * 6, dtype='i8').reshape(5, 6) + 100, + 
long_name="surface_altitude", + units="m"), [2, 3]) cube.add_aux_factory(iris.aux_factory.HybridHeightFactory( - delta=cube.coord("level_height"), - sigma=cube.coord("sigma"), - orography=cube.coord("surface_altitude"))) + delta=cube.coord("level_height"), + sigma=cube.coord("sigma"), + orography=cube.coord("surface_altitude"))) return cube @@ -428,7 +436,7 @@ def realistic_3d(): grid_longitude: 11)> """ - data = np.arange(7*9*11).reshape((7,9,11)) + data = np.arange(7 * 9 * 11).reshape((7, 9, 11)) lat_pts = np.linspace(-4, 4, 9) lon_pts = np.linspace(-5, 5, 11) time_pts = np.linspace(394200, 394236, 7) @@ -464,38 +472,18 @@ def realistic_4d(): grid_latitude: 100; grid_longitude: 100)> """ - # the stock arrays were created in Iris 0.8 with: -# >>> fname = iris.sample_data_path('PP', 'COLPEX', 'theta_and_orog_subset.pp') -# >>> theta = iris.load_cube(fname, 'air_potential_temperature') -# >>> for coord in theta.coords(): -# ... print(coord.name, coord.has_points(), coord.has_bounds(), coord.units) -# ... -# grid_latitude True True degrees -# grid_longitude True True degrees -# level_height True True m -# model_level True False 1 -# sigma True True 1 -# time True False hours since 1970-01-01 00:00:00 -# source True False no_unit -# forecast_period True False hours -# >>> arrays = [] -# >>> for coord in theta.coords(): -# ... if coord.has_points(): arrays.append(coord.points) -# ... if coord.has_bounds(): arrays.append(coord.bounds) -# >>> arrays.append(theta.data) -# >>> arrays.append(theta.coord('sigma').coord_system.orography.data) -# >>> np.savez('stock_arrays.npz', *arrays) data_path = tests.get_data_path(('stock', 'stock_arrays.npz')) if not os.path.isfile(data_path): raise IOError('Test data is not available at {}.'.format(data_path)) r = np.load(data_path) # sort the arrays based on the order they were originally given. 
# The names given are of the form 'arr_1' or 'arr_10' - _, arrays = zip(*sorted(six.iteritems(r), key=lambda item: int(item[0][4:]))) + _, arrays = zip( + *sorted(six.iteritems(r), key=lambda item: int(item[0][4:]))) lat_pts, lat_bnds, lon_pts, lon_bnds, level_height_pts, \ - level_height_bnds, model_level_pts, sigma_pts, sigma_bnds, time_pts, \ - _source_pts, forecast_period_pts, data, orography = arrays + level_height_bnds, model_level_pts, sigma_pts, sigma_bnds, time_pts, \ + _source_pts, forecast_period_pts, data, orography = arrays ll_cs = RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0)) @@ -576,7 +564,7 @@ def realistic_4d_w_missing_data(): forecast_period = DimCoord([0.0, 3.0, 6.0], standard_name='forecast_period', units='hours') - pressure = DimCoord(np.array([800., 900., 1000.], dtype=np.float32), + pressure = DimCoord(np.array([800., 900., 1000.], dtype=np.float32), long_name='pressure', units='hPa') cube = iris.cube.Cube(data, long_name='missing data test data', units='K', @@ -606,7 +594,7 @@ def ocean_sigma_z(): co_time = DimCoord([0.0, 1.0], standard_name='time', units='') co_lats = DimCoord([-58.1, -52.7, -46.9], standard_name='latitude', units=Unit('degrees')) - co_lons = DimCoord([65.1, 72.9, 83.7, 96.5], + co_lons = DimCoord([65.1, 72.9, 83.7, 96.5], standard_name='longitude', units=Unit('degrees')) co_ssh = AuxCoord([[[-0.63157895, -0.52631579, -0.42105263, -0.31578947], [-0.78947368, -0.68421053, -0.57894737, -0.47368421], @@ -646,3 +634,41 @@ def ocean_sigma_z(): sigma=co_sigma, nsigma=co_nsigma) cube.add_aux_factory(fact) return cube + + +def climatology_3d(): + def jan_offset(day, year): + dt = (datetime(year, 1, day) - datetime(1970, 1, 1)) + return dt.total_seconds() / (24. 
* 3600) + + days = range(10, 15) + years = [[year, year + 10] for year in [2001] * 4] + days_since = [[jan_offset(day, yr1), jan_offset(day, yr2)] + for (day, [yr1, yr2]) + in zip(days, years)] + time_bounds = np.array(days_since) + time_points = time_bounds[..., 0] + + lon = np.linspace(-25, 25, 5) + lat = np.linspace(0, 60, 3) + + time_dim = DimCoord(time_points, + standard_name='time', + bounds=time_bounds, + units='days since 1970-01-01 00:00:00-00', + climatological=True + ) + lon_dim = DimCoord(lon, standard_name='longitude') + lat_dim = DimCoord(lat, standard_name='latitude') + + data_shape = (len(time_points), len(lat), len(lon)) + values = np.zeros(shape=data_shape, dtype=np.int8) + cube = Cube(values) + cube.add_dim_coord(time_dim, 0) + cube.add_dim_coord(lat_dim, 1) + cube.add_dim_coord(lon_dim, 2) + cube.rename('climatology test') + cube.units = 'Kelvin' + cube.add_cell_method(CellMethod('mean over years', coords='time')) + + return cube diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index ca15dea4b1..4ed4a08fea 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2017, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -291,15 +291,17 @@ def vec_mag(u, v): self.assertCMLApproxData(b, ('analysis', 'apply_ifunc_frompyfunc.cml')) def test_ifunc_init_fail(self): + import six # should fail because 'blah' is a string not a python function self.assertRaises(TypeError, iris.analysis.maths.IFunc, 'blah', lambda cube: cf_units.Unit('1')) - # should fail because math.sqrt is built-in function, which can not be - # used in inspect.getargspec - self.assertRaises(TypeError, iris.analysis.maths.IFunc, math.sqrt, - lambda cube: cf_units.Unit('1')) + if six.PY2: + # should fail because math.sqrt is built-in function, which cannot + # be used in inspect.getargspec + self.assertRaises(TypeError, iris.analysis.maths.IFunc, math.sqrt, + lambda cube: cf_units.Unit('1')) # should fail because np.frexp gives 2 arrays as output self.assertRaises(ValueError, iris.analysis.maths.IFunc, np.frexp, @@ -594,14 +596,13 @@ def vec_mag(u, v): def vec_mag_data_func(u_data, v_data): return np.sqrt( u_data**2 + v_data**2 ) - vec_mag_ifunc = iris.analysis.maths.IFunc(vec_mag_data_func, lambda a,b: (a + b).units) + vec_mag_ifunc = iris.analysis.maths.IFunc(vec_mag_data_func, + lambda a, b: (a + b).units) b2 = vec_mag_ifunc(a, c) self.assertArrayAlmostEqual(b.data, b2.data) - cs_ifunc = iris.analysis.maths.IFunc(np.cumsum, - lambda a: a.units - ) + cs_ifunc = iris.analysis.maths.IFunc(np.cumsum, lambda a: a.units) b = cs_ifunc(a, axis=1) ans = a.data.copy() diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 10fa49cdc9..b4512dd937 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -105,7 +105,6 @@ class StandardReportWithExclusions(pep8.StandardReport): '*/iris/tests/test_cell.py', '*/iris/tests/test_cf.py', '*/iris/tests/test_constraints.py', - '*/iris/tests/test_coord_api.py', '*/iris/tests/test_coord_categorisation.py', '*/iris/tests/test_coordsystem.py', '*/iris/tests/test_cube_to_pp.py', diff --git 
a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 4708e2febb..4866adc915 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2017, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. # @@ -18,14 +18,16 @@ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa +import six -# import iris tests first so that some things can be initialised before importing anything else +# import iris tests first so that some things can be initialised before +# importing anything else import iris.tests as tests from xml.dom.minidom import Document -import cf_units import numpy as np +import numpy.ma as ma import iris import iris.aux_factory @@ -36,105 +38,113 @@ import iris.tests.stock - - @tests.skip_data class TestCoordSlicing(tests.IrisTest): def setUp(self): cube = iris.tests.stock.realistic_4d() self.lat = cube.coord('grid_latitude') self.surface_altitude = cube.coord('surface_altitude') - + def test_slice_copy(self): a = self.lat b = a.copy() self.assertEqual(a, b) self.assertFalse(a is b) - + a = self.lat b = a[:] self.assertEqual(a, b) self.assertFalse(a is b) - + def test_slice_multiple_indices(self): aux_lat = iris.coords.AuxCoord.from_coord(self.lat) aux_sliced = aux_lat[(3, 4), :] - dim_sliced = self.lat[(3, 4), :] - + dim_sliced = self.lat[(3, 4), :] + self.assertEqual(dim_sliced, aux_sliced) def test_slice_reverse(self): b = self.lat[::-1] np.testing.assert_array_equal(b.points, self.lat.points[::-1]) np.testing.assert_array_equal(b.bounds, self.lat.bounds[::-1, :]) - + c = b[::-1] self.assertEqual(self.lat, c) - + def test_multidim(self): a = self.surface_altitude # make some arbitrary bounds bound_shape = a.shape + (2,) a.bounds = np.arange(np.prod(bound_shape)).reshape(bound_shape) b = a[(0, 2), (0, -1)] - 
np.testing.assert_array_equal(b.points, a.points[(0, 2), :][:, (0, -1)]) - np.testing.assert_array_equal(b.bounds, a.bounds[(0, 2), :, :][:, (0, -1), :]) + np.testing.assert_array_equal( + b.points, a.points[(0, 2), :][:, (0, -1)]) + np.testing.assert_array_equal( + b.bounds, a.bounds[(0, 2), :, :][:, (0, -1), :]) class TestCoordIntersection(tests.IrisTest): def setUp(self): - self.a = iris.coords.DimCoord(np.arange(9., dtype=np.float32) * 3 + 9., long_name='foo', units='meter')# 0.75) + self.a = iris.coords.DimCoord( + np.arange(9., dtype=np.float32) * 3 + 9., + long_name='foo', + units='meter') # 0.75) self.a.guess_bounds(0.75) - pts = np.array([ 3., 6., 9., 12., 15., 18., 21., 24., 27., 30.], dtype=np.float32) - bnds = np.array([[ 0.75, 3.75], - [ 3.75, 6.75], - [ 6.75, 9.75], - [ 9.75, 12.75], - [ 12.75, 15.75], - [ 15.75, 18.75], - [ 18.75, 21.75], - [ 21.75, 24.75], - [ 24.75, 27.75], - [ 27.75, 30.75]], dtype=np.float32) - self.b = iris.coords.AuxCoord(pts, long_name='foo', units='meter', bounds=bnds) - + pts = np.array( + [3., 6., 9., 12., 15., 18., 21., 24., 27., 30.], + dtype=np.float32) + bnds = np.array([[0.75, 3.75], + [3.75, 6.75], + [6.75, 9.75], + [9.75, 12.75], + [12.75, 15.75], + [15.75, 18.75], + [18.75, 21.75], + [21.75, 24.75], + [24.75, 27.75], + [27.75, 30.75]], dtype=np.float32) + self.b = iris.coords.AuxCoord( + pts, long_name='foo', units='meter', bounds=bnds) + def test_basic_intersection(self): inds = self.a.intersect(self.b, return_indices=True) self.assertEqual((0, 1, 2, 3, 4, 5, 6, 7), tuple(inds)) - + c = self.a.intersect(self.b) self.assertXMLElement(c, ('coord_api', 'intersection.xml')) - + def test_intersection_reverse(self): - inds = self.a.intersect(self.b[::-1], return_indices=True) + inds = self.a.intersect(self.b[::-1], return_indices=True) self.assertEqual((7, 6, 5, 4, 3, 2, 1, 0), tuple(inds)) - + c = self.a.intersect(self.b[::-1]) self.assertXMLElement(c, ('coord_api', 'intersection_reversed.xml')) - - def 
test_no_intersection_on_points(self): - # Coordinates which do not share common points but with common bounds should fail + + def test_no_intersection_on_points(self): + # Coordinates which do not share common points but with common + # bounds should fail self.a.points = self.a.points + 200 self.assertRaises(ValueError, self.a.intersect, self.b) - + def test_intersection_one_fewer_upper_bound_than_lower(self): - self.b.bounds[4, 1] = self.b.bounds[0, 1] + self.b.bounds[4, 1] = self.b.bounds[0, 1] c = self.a.intersect(self.b) self.assertXMLElement(c, ('coord_api', 'intersection_missing.xml')) - - def test_no_intersection_on_bounds(self): - # Coordinates which do not share common bounds but with common points should fail + + def test_no_intersection_on_bounds(self): + # Coordinates which do not share common bounds but with common + # points should fail self.a.bounds = None a = self.a.copy() a.bounds = None a.guess_bounds(bound_position=0.25) self.assertRaises(ValueError, a.intersect, self.b) - + def test_no_intersection_on_name(self): # Coordinates which do not share the same name should fail self.a.long_name = 'foobar' self.assertRaises(ValueError, self.a.intersect, self.b) - + def test_no_intersection_on_unit(self): # Coordinates which do not share the same unit should fail self.a.units = 'kilometer' @@ -145,8 +155,10 @@ def test_commutative(self): cube = iris.tests.stock.realistic_4d() coord = cube.coord('grid_longitude') offset_coord = coord.copy() - offset_coord = offset_coord - (offset_coord.points[20] - offset_coord.points[0]) - self.assertEqual(coord.intersect(offset_coord), offset_coord.intersect(coord)) + offset_coord =\ + offset_coord - (offset_coord.points[20] - offset_coord.points[0]) + self.assertEqual( + coord.intersect(offset_coord), offset_coord.intersect(coord)) class TestXML(tests.IrisTest): @@ -192,7 +204,7 @@ def test_AuxCoord_str(self): class TestCoord_ReprStr_time(tests.IrisTest): def setUp(self): self.time = 
iris.tests.stock.realistic_4d().coord('time') - + def test_DimCoord_repr(self): self.assertRepr(self.time, ('coord_api', 'str_repr', 'dim_time_repr.txt')) @@ -214,20 +226,27 @@ class TestAuxCoordCreation(tests.IrisTest): def test_basic(self): a = iris.coords.AuxCoord(np.arange(10), 'air_temperature', units='kelvin') - result = "AuxCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), standard_name='air_temperature', units=Unit('kelvin'))" + result = "AuxCoord(" \ + "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," \ + " standard_name='air_temperature'," \ + " units=Unit('kelvin'))" self.assertEqual(result, str(a)) b = iris.coords.AuxCoord(list(range(10)), attributes={'monty': 'python'}) - result = "AuxCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), standard_name=None, units=Unit('1'), attributes={'monty': 'python'})" + result = "AuxCoord(" \ + "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," \ + " standard_name=None," \ + " units=Unit('1')," \ + " attributes={'monty': 'python'})" self.assertEqual(result, str(b)) - + def test_excluded_attributes(self): with self.assertRaises(ValueError): iris.coords.AuxCoord(np.arange(10), 'air_temperature', units='kelvin', attributes={'standard_name': 'whoopsy'}) - + a = iris.coords.AuxCoord(np.arange(10), 'air_temperature', units='kelvin') with self.assertRaises(ValueError): @@ -239,21 +258,27 @@ def test_coord_system(self): a = iris.coords.AuxCoord(np.arange(10), 'air_temperature', units='kelvin', coord_system=iris.coord_systems.GeogCS(6000)) - result = "AuxCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), standard_name='air_temperature', units=Unit('kelvin'), "\ - "coord_system=GeogCS(6000.0))" + result = "AuxCoord(" \ + "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," \ + " standard_name='air_temperature'," \ + " units=Unit('kelvin')," \ + " coord_system=GeogCS(6000.0))" self.assertEqual(result, str(a)) - + def test_bounded(self): a = iris.coords.AuxCoord(np.arange(10), 'air_temperature', units='kelvin', bounds=np.arange(0, 20).reshape(10, 2)) result = 
("AuxCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ", bounds=array([[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n [ 6, 7],\n [ 8, 9],\n "\ - "[10, 11],\n [12, 13],\n [14, 15],\n [16, 17],\n [18, 19]])" + ", bounds=array([" + "[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n " + "[ 6, 7],\n [ 8, 9],\n [10, 11],\n " + "[12, 13],\n [14, 15],\n [16, 17],\n " + "[18, 19]])" ", standard_name='air_temperature', units=Unit('kelvin'))" ) self.assertEqual(result, str(a)) - + def test_string_coord_equality(self): b = iris.coords.AuxCoord(['Jan', 'Feb', 'March'], units='no_unit') c = iris.coords.AuxCoord(['Jan', 'Feb', 'March'], units='no_unit') @@ -266,26 +291,33 @@ def test_AuxCoord_fromcoord(self): a = iris.coords.DimCoord(10, coord_system=crs) b = iris.coords.AuxCoord.from_coord(a) self.assertIsNot(a.coord_system, b.coord_system) - - + + class TestDimCoordCreation(tests.IrisTest): def test_basic(self): a = iris.coords.DimCoord(np.arange(10), 'air_temperature', units='kelvin') - result = "DimCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), standard_name='air_temperature', units=Unit('kelvin'))" + result = "DimCoord(" \ + "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," \ + " standard_name='air_temperature'," \ + " units=Unit('kelvin'))" self.assertEqual(result, str(a)) b = iris.coords.DimCoord(list(range(10)), attributes={'monty': 'python'}) - result = "DimCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), standard_name=None, units=Unit('1'), attributes={'monty': 'python'})" + result = "DimCoord(" \ + "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," \ + " standard_name=None," \ + " units=Unit('1')," \ + " attributes={'monty': 'python'})" self.assertEqual(result, str(b)) - + def test_excluded_attributes(self): with self.assertRaises(ValueError): iris.coords.DimCoord(np.arange(10), 'air_temperature', units='kelvin', attributes={'standard_name': 'whoopsy'}) - + a = iris.coords.DimCoord(np.arange(10), 'air_temperature', units='kelvin') with self.assertRaises(ValueError): @@ -297,34 +329,49 @@ def test_coord_system(self): a = 
iris.coords.DimCoord(np.arange(10), 'air_temperature', units='kelvin', coord_system=iris.coord_systems.GeogCS(6000)) - result = "DimCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), standard_name='air_temperature', units=Unit('kelvin'), "\ - "coord_system=GeogCS(6000.0))" + result = "DimCoord(" \ + "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])," \ + " standard_name='air_temperature'," \ + " units=Unit('kelvin')," \ + " coord_system=GeogCS(6000.0))" self.assertEqual(result, str(a)) - + def test_bounded(self): a = iris.coords.DimCoord(np.arange(10), 'air_temperature', units='kelvin', bounds=np.arange(0, 20).reshape(10, 2)) result = ("DimCoord(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ", bounds=array([[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n [ 6, 7],\n [ 8, 9],\n "\ - "[10, 11],\n [12, 13],\n [14, 15],\n [16, 17],\n [18, 19]])" + ", bounds=array([" + "[ 0, 1],\n [ 2, 3],\n [ 4, 5],\n " + "[ 6, 7],\n [ 8, 9],\n [10, 11],\n " + "[12, 13],\n [14, 15],\n [16, 17],\n " + "[18, 19]])" ", standard_name='air_temperature', units=Unit('kelvin'))" ) - self.assertEqual(result, str(a)) - + self.assertEqual(result, str(a)) + def test_dim_coord_restrictions(self): # 1d with self.assertRaisesRegexp(ValueError, 'must be scalar or 1-dim'): iris.coords.DimCoord([[1, 2, 3], [4, 5, 6]]) - # monotonic + # monotonic points with self.assertRaisesRegexp(ValueError, 'must be strictly monotonic'): iris.coords.DimCoord([1, 2, 99, 4, 5]) # monotonic bounds - with self.assertRaisesRegexp(ValueError, - 'monotonicity.*consistent.*all bounds'): + with self.assertRaisesRegexp(ValueError, + 'direction of monotonicity'): iris.coords.DimCoord([1, 2, 3], bounds=[[1, 12], [2, 9], [3, 6]]) + # masked points + emsg = 'points array must not be masked' + with six.assertRaisesRegex(self, TypeError, emsg): + iris.coords.DimCoord(ma.masked_array([0, 1, 2], mask=[0, 1, 0])) + # masked bounds + emsg = 'bounds array must not be masked' + with six.assertRaisesRegex(self, TypeError, emsg): + iris.coords.DimCoord([1], 
bounds=ma.masked_array([[0, 2]], + mask=True)) # shapes of points and bounds - msg = 'The shape of the bounds array should be' + msg = "The shape of the 'unknown' DimCoord bounds array should be" with self.assertRaisesRegexp(ValueError, msg): iris.coords.DimCoord([1, 2, 3], bounds=[0.5, 1.5, 2.5, 3.5]) # another example of shapes of points and bounds @@ -334,19 +381,23 @@ def test_dim_coord_restrictions(self): # numeric with self.assertRaises(ValueError): iris.coords.DimCoord(['Jan', 'Feb', 'March']) - + def test_DimCoord_equality(self): # basic regular coord - b = iris.coords.DimCoord([1, 2]) - c = iris.coords.DimCoord([1, 2.]) + b = iris.coords.DimCoord([1, 2]) + c = iris.coords.DimCoord([1, 2.]) d = iris.coords.DimCoord([1, 2], circular=True) self.assertEqual(b, c) self.assertNotEqual(b, d) - + def test_Dim_to_Aux(self): - a = iris.coords.DimCoord(np.arange(10), standard_name='air_temperature', long_name='custom air temp', - units='kelvin', attributes={'monty': 'python'}, - bounds=np.arange(20).reshape(10, 2), circular=True) + a = iris.coords.DimCoord(np.arange(10), + standard_name='air_temperature', + long_name='custom air temp', + units='kelvin', + attributes={'monty': 'python'}, + bounds=np.arange(20).reshape(10, 2), + circular=True) b = iris.coords.AuxCoord.from_coord(a) # Note - circular attribute is not a factor in equality comparison self.assertEqual(a, b) @@ -414,10 +465,13 @@ def _build_coord(self, start=None, step=None, count=None): count = int(count or self.count) bound_position = dtype(0.5) points = np.arange(count, dtype=dtype) * step + start - bounds = np.concatenate([[points - bound_position * step], - [points + (1 - bound_position) * step]]).T - self.lon = iris.coords.AuxCoord(points, 'latitude', units='degrees', bounds=bounds) - self.rlon = iris.coords.AuxCoord(np.deg2rad(points), 'latitude', units='radians', bounds=np.deg2rad(bounds)) + bounds = np.concatenate([[points - bound_position * step], + [points + (1 - bound_position) * step]]).T + 
self.lon = iris.coords.AuxCoord( + points, 'latitude', units='degrees', bounds=bounds) + self.rlon = iris.coords.AuxCoord( + np.deg2rad(points), 'latitude', units='radians', + bounds=np.deg2rad(bounds)) def setUp(self): self.start = 0 @@ -429,103 +483,119 @@ def setUp(self): class TestCoordAdditionSubtract(TestCoordMaths): def test_subtract(self): r_expl = self.lon - 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'subtract_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'subtract_simple_expl.xml')) + def test_subtract_in_place(self): r_expl = self.lon.copy() r_expl -= 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'subtract_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'subtract_simple_expl.xml')) + def test_neg(self): self._build_coord(start=8) r_expl = -self.lon np.testing.assert_array_equal(r_expl.points, -(self.lon.points)) - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'negate_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'negate_expl.xml')) + def test_right_subtract(self): r_expl = 10 - self.lon # XXX original xml was for regular case, not explicit. 
- self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'r_subtract_simple_exl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'r_subtract_simple_exl.xml')) + def test_add(self): r_expl = self.lon + 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'add_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'add_simple_expl.xml')) + def test_add_in_place(self): r_expl = self.lon.copy() r_expl += 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'add_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'add_simple_expl.xml')) + def test_add_float(self): r_expl = self.lon + 10.321 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'add_float_expl.xml')) - self.assertEqual(r_expl, 10.321 + self.lon.copy() ) - - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'add_float_expl.xml')) + self.assertEqual(r_expl, 10.321 + self.lon.copy()) + + class TestCoordMultDivide(TestCoordMaths): def test_divide(self): r_expl = self.lon.copy() / 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'divide_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'divide_simple_expl.xml')) + def test_right_divide(self): self._build_coord(start=10) test_coord = self.lon.copy() - + r_expl = 1 / test_coord - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'right_divide_simple_expl.xml')) + self.assertXMLElement( + r_expl, + ('coord_api', 'coord_maths', 'right_divide_simple_expl.xml')) def test_divide_in_place(self): r_expl = self.lon.copy() r_expl /= 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'divide_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'divide_simple_expl.xml')) + def test_multiply(self): r_expl = self.lon.copy() * 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'multiply_simple_expl.xml')) - + 
self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'multiply_simple_expl.xml')) + def test_multiply_in_place_reg(self): r_expl = self.lon.copy() r_expl *= 10 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'multiply_simple_expl.xml')) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'multiply_simple_expl.xml')) + def test_multiply_float(self): r_expl = self.lon.copy() * 10.321 - self.assertXMLElement(r_expl, ('coord_api', 'coord_maths', 'mult_float_expl.xml')) - self.assertEqual(r_expl, 10.321 * self.lon.copy() ) - + self.assertXMLElement( + r_expl, ('coord_api', 'coord_maths', 'mult_float_expl.xml')) + self.assertEqual(r_expl, 10.321 * self.lon.copy()) + class TestCoordCollapsed(tests.IrisTest): def create_1d_coord(self, bounds=None, points=None, units='meter'): - coord = iris.coords.DimCoord(points, long_name='test', units=units, + coord = iris.coords.DimCoord(points, long_name='test', units=units, bounds=bounds) return coord - + def test_explicit(self): - orig_coord = self.create_1d_coord(points=list(range(10)), + orig_coord = self.create_1d_coord(points=list(range(10)), bounds=[(b, b+1) for b in range(10)]) coord_expected = self.create_1d_coord(points=5, bounds=[(0, 10)]) # test points & bounds self.assertEqual(coord_expected, orig_coord.collapsed()) - + # test points only coord = orig_coord.copy() coord_expected = self.create_1d_coord(points=4, bounds=[(0, 9)]) coord.bounds = None - self.assertEqual(coord_expected, coord.collapsed()) + self.assertEqual(coord_expected, coord.collapsed()) def test_circular_collapse(self): - # set up a coordinate that wraps 360 degrees in points using the circular flag + # set up a coordinate that wraps 360 degrees in points using the + # circular flag coord = self.create_1d_coord(None, np.arange(10) * 36, 'degrees') expected_coord = self.create_1d_coord([0., 360.], [180.], 'degrees') coord.circular = True - + # test collapsing self.assertEqual(expected_coord, coord.collapsed()) - # the 
order of the points/bounds should not affect the resultant bounded coordinate + # the order of the points/bounds should not affect the resultant + # bounded coordinate. coord = coord[::-1] self.assertEqual(expected_coord, coord.collapsed()) - + def test_nd_bounds(self): cube = iris.tests.stock.simple_2d_w_multidim_coords(with_bounds=True) pcube = cube.collapsed(['bar', 'foo'], iris.analysis.SUM) @@ -755,11 +825,12 @@ def setUp(self): standard_name='longitude', var_name='lon', units='degrees') - self.dim_coord = iris.coords.DimCoord(np.arange(0, 360, dtype=np.float64), - standard_name='longitude', - var_name='lon', - units='degrees', - circular=True) + self.dim_coord = iris.coords.DimCoord( + np.arange(0, 360, dtype=np.float64), + standard_name='longitude', + var_name='lon', + units='degrees', + circular=True) def test_not_compatible(self): r = self.aux_coord.copy() @@ -774,7 +845,7 @@ def test_not_compatible(self): self.assertFalse(self.aux_coord.is_compatible(r)) # Different attributes. r = self.aux_coord.copy() - self.aux_coord.attributes['source']= 'bob' + self.aux_coord.attributes['source'] = 'bob' r.attributes['source'] = 'alice' self.assertFalse(self.aux_coord.is_compatible(r)) @@ -782,7 +853,7 @@ def test_compatible(self): # The following changes should not affect compatibility. # Different non-common attributes. r = self.aux_coord.copy() - self.aux_coord.attributes['source']= 'bob' + self.aux_coord.attributes['source'] = 'bob' r.attributes['origin'] = 'alice' self.assertTrue(self.aux_coord.is_compatible(r)) # Different points. @@ -812,7 +883,7 @@ def test_defn(self): def test_is_ignore(self): r = self.aux_coord.copy() - self.aux_coord.attributes['source']= 'bob' + self.aux_coord.attributes['source'] = 'bob' r.attributes['source'] = 'alice' self.assertFalse(self.aux_coord.is_compatible(r)) # Use ignore keyword. 
@@ -823,7 +894,8 @@ def test_is_ignore(self): class TestAuxCoordEquality(tests.IrisTest): def test_not_implmemented(self): - class Terry(object): pass + class Terry(object): + pass aux = iris.coords.AuxCoord(0) self.assertIs(aux.__eq__(Terry()), NotImplemented) self.assertIs(aux.__ne__(Terry()), NotImplemented) @@ -831,7 +903,8 @@ class Terry(object): pass class TestDimCoordEquality(tests.IrisTest): def test_not_implmemented(self): - class Terry(object): pass + class Terry(object): + pass dim = iris.coords.DimCoord(0) aux = iris.coords.AuxCoord(0) self.assertIs(dim.__eq__(Terry()), NotImplemented) @@ -839,6 +912,15 @@ class Terry(object): pass self.assertIs(dim.__eq__(aux), NotImplemented) self.assertIs(dim.__ne__(aux), NotImplemented) + def test_climatological(self): + co1 = iris.coords.DimCoord( + [0], bounds=[[0, 1]], units='days since 1970-01-01') + co2 = co1.copy() + co2.climatological = True + self.assertNotEqual(co1, co2) + co2.climatological = False + self.assertEqual(co1, co2) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index 1ca86e6929..04b0c3ce2c 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -26,7 +26,10 @@ # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests -from collections import Iterable +try: # Python 3 + from collections.abc import Iterable +except ImportError: # Python 2.7 + from collections import Iterable import datetime import itertools import numpy as np diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index a942f4b398..2b23b144a8 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2018, Met Office +# (C) British Crown Copyright 2010 - 2019, Met Office # # This file is part of Iris. 
# @@ -106,6 +106,11 @@ def test_load_global_xyzt_gems_iter(self): self.assertCML(cube, ('netcdf', 'netcdf_global_xyzt_gems_iter_%d.cml' % i)) + # ------------------------------------------------------------------------- + # It is not considered necessary to have integration tests for + # loading EVERY coordinate system. A subset are tested below. + # ------------------------------------------------------------------------- + def test_load_rotated_xy_land(self): # Test loading single xy rotated pole CF-netCDF file. cube = iris.load_cube(tests.get_data_path( diff --git a/lib/iris/tests/unit/experimental/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py similarity index 97% rename from lib/iris/tests/unit/experimental/regrid/test__CurvilinearRegridder.py rename to lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py index 99dd0c483b..14997fc7a2 100644 --- a/lib/iris/tests/unit/experimental/regrid/test__CurvilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2015 - 2017, Met Office +# (C) British Crown Copyright 2015 - 2019, Met Office # # This file is part of Iris. # @@ -14,7 +14,7 @@ # # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see . 
-"""Unit tests for :class:`iris.experimental.regrid._CurvilinearRegridder`.""" +"""Unit tests for :class:`iris.analysis._regrid.CurvilinearRegridder`.""" from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa @@ -30,7 +30,7 @@ from iris.coords import AuxCoord, DimCoord from iris.coord_systems import GeogCS, RotatedGeogCS from iris.fileformats.pp import EARTH_RADIUS -from iris.experimental.regrid import _CurvilinearRegridder as Regridder +from iris.analysis._regrid import CurvilinearRegridder as Regridder from iris.tests import mock from iris.tests.stock import global_pp, lat_lon_cube @@ -57,10 +57,10 @@ def test_bad_grid_type(self): class Test___call__(tests.IrisTest): def setUp(self): self.func_setup = ( - 'iris.experimental.regrid.' + 'iris.analysis._regrid.' '_regrid_weighted_curvilinear_to_rectilinear__prepare') self.func_operate = ( - 'iris.experimental.regrid.' + 'iris.analysis._regrid.' '_regrid_weighted_curvilinear_to_rectilinear__perform') # Define a test source grid and target grid, basically the same. self.src_grid = global_pp() diff --git a/lib/iris/tests/unit/experimental/regrid/test_PointInCell.py b/lib/iris/tests/unit/analysis/test_PointInCell.py similarity index 86% rename from lib/iris/tests/unit/experimental/regrid/test_PointInCell.py rename to lib/iris/tests/unit/analysis/test_PointInCell.py index 17be3845ec..2d4272712a 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_PointInCell.py +++ b/lib/iris/tests/unit/analysis/test_PointInCell.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2015, Met Office +# (C) British Crown Copyright 2015 - 2019, Met Office # # This file is part of Iris. # @@ -14,7 +14,7 @@ # # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see . 
-"""Unit tests for :class:`iris.experimental.regrid.PointInCell`.""" +"""Unit tests for :class:`iris.analysis.PointInCell`.""" from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa @@ -23,7 +23,7 @@ # importing anything else. import iris.tests as tests -from iris.experimental.regrid import PointInCell +from iris.analysis import PointInCell from iris.tests import mock @@ -31,7 +31,7 @@ class Test_regridder(tests.IrisTest): def test(self): point_in_cell = PointInCell(mock.sentinel.weights) - with mock.patch('iris.experimental.regrid._CurvilinearRegridder', + with mock.patch('iris.analysis.CurvilinearRegridder', return_value=mock.sentinel.regridder) as ecr: regridder = point_in_cell.regridder(mock.sentinel.src, mock.sentinel.target) diff --git a/lib/iris/tests/unit/analysis/test_RMS.py b/lib/iris/tests/unit/analysis/test_RMS.py index 150e0f9040..2ad7d75ed3 100644 --- a/lib/iris/tests/unit/analysis/test_RMS.py +++ b/lib/iris/tests/unit/analysis/test_RMS.py @@ -92,8 +92,7 @@ def test_masked_weighted(self): class Test_lazy_aggregate(tests.IrisTest): def test_1d(self): # 1-dimensional input. - data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64), - chunks=-1) + data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64)) rms = RMS.lazy_aggregate(data, 0) expected_rms = 4.5 self.assertAlmostEqual(rms, expected_rms) @@ -101,16 +100,14 @@ def test_1d(self): def test_2d(self): # 2-dimensional input. data = as_lazy_data(np.array([[5, 2, 6, 4], [12, 4, 10, 8]], - dtype=np.float64), - chunks=-1) + dtype=np.float64)) expected_rms = np.array([4.5, 9.0], dtype=np.float64) rms = RMS.lazy_aggregate(data, 1) self.assertArrayAlmostEqual(rms, expected_rms) def test_1d_weighted(self): # 1-dimensional input with weights. 
- data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64), - chunks=-1) + data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64)) weights = np.array([1, 4, 3, 2], dtype=np.float64) expected_rms = 8.0 # https://github.com/dask/dask/issues/3846. @@ -120,10 +117,8 @@ def test_1d_weighted(self): def test_1d_lazy_weighted(self): # 1-dimensional input with lazy weights. - data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64), - chunks=-1) - weights = as_lazy_data(np.array([1, 4, 3, 2], dtype=np.float64), - chunks=-1) + data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64)) + weights = as_lazy_data(np.array([1, 4, 3, 2], dtype=np.float64)) expected_rms = 8.0 # https://github.com/dask/dask/issues/3846. with self.assertRaisesRegexp(TypeError, 'unexpected keyword argument'): @@ -133,8 +128,7 @@ def test_1d_lazy_weighted(self): def test_2d_weighted(self): # 2-dimensional input with weights. data = as_lazy_data(np.array([[4, 7, 10, 8], [14, 16, 20, 8]], - dtype=np.float64), - chunks=-1) + dtype=np.float64)) weights = np.array([[1, 4, 3, 2], [2, 1, 1.5, 0.5]], dtype=np.float64) expected_rms = np.array([8.0, 16.0], dtype=np.float64) # https://github.com/dask/dask/issues/3846. @@ -144,8 +138,7 @@ def test_2d_weighted(self): def test_unit_weighted(self): # Unit weights should be the same as no weights. - data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64), - chunks=-1) + data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64)) weights = np.ones_like(data) expected_rms = 4.5 # https://github.com/dask/dask/issues/3846. @@ -157,8 +150,7 @@ def test_masked(self): # Masked entries should be completely ignored. 
data = as_lazy_data(ma.array([5, 10, 2, 11, 6, 4], mask=[False, True, False, True, False, False], - dtype=np.float64), - chunks=-1) + dtype=np.float64)) expected_rms = 4.5 rms = RMS.lazy_aggregate(data, 0) self.assertAlmostEqual(rms, expected_rms) @@ -169,8 +161,7 @@ def test_masked_weighted(self): # For now, masked weights are simply not supported. data = as_lazy_data(ma.array([4, 7, 18, 10, 11, 8], mask=[False, False, True, False, True, False], - dtype=np.float64), - chunks=-1) + dtype=np.float64)) weights = np.array([1, 4, 5, 3, 8, 2]) expected_rms = 8.0 with self.assertRaisesRegexp(TypeError, 'unexpected keyword argument'): diff --git a/lib/iris/tests/unit/coord_systems/test_Geostationary.py b/lib/iris/tests/unit/coord_systems/test_Geostationary.py new file mode 100644 index 0000000000..3ff8a20893 --- /dev/null +++ b/lib/iris/tests/unit/coord_systems/test_Geostationary.py @@ -0,0 +1,97 @@ +# (C) British Crown Copyright 2014 - 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +"""Unit tests for the :class:`iris.coord_systems.Geostationary` class.""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +import six + +import cartopy.crs as ccrs +from iris.coord_systems import GeogCS, Geostationary + + +class Test(tests.IrisTest): + def setUp(self): + self.latitude_of_projection_origin = 0.0 + self.longitude_of_projection_origin = 0.0 + self.perspective_point_height = 35785831.0 + self.sweep_angle_axis = 'y' + self.false_easting = 0.0 + self.false_northing = 0.0 + + self.semi_major_axis = 6377563.396 + self.semi_minor_axis = 6356256.909 + self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) + self.globe = ccrs.Globe(semimajor_axis=self.semi_major_axis, + semiminor_axis=self.semi_minor_axis, + ellipse=None) + + # Actual and expected coord system can be re-used for + # Geostationary.test_crs_creation and test_projection_creation. + self.expected = ccrs.Geostationary( + central_longitude=self.longitude_of_projection_origin, + satellite_height=self.perspective_point_height, + false_easting=self.false_easting, + false_northing=self.false_northing, + globe=self.globe, + sweep_axis=self.sweep_angle_axis + ) + self.geo_cs = Geostationary(self.latitude_of_projection_origin, + self.longitude_of_projection_origin, + self.perspective_point_height, + self.sweep_angle_axis, + self.false_easting, + self.false_northing, + self.ellipsoid) + + def test_crs_creation(self): + res = self.geo_cs.as_cartopy_crs() + self.assertEqual(res, self.expected) + + def test_projection_creation(self): + res = self.geo_cs.as_cartopy_projection() + self.assertEqual(res, self.expected) + + def test_non_zero_lat(self): + with six.assertRaisesRegex(self, ValueError, 'Non-zero latitude'): + Geostationary(0.1, + self.longitude_of_projection_origin, + self.perspective_point_height, + self.sweep_angle_axis, + self.false_easting, + self.false_northing, + self.ellipsoid) + + def test_invalid_sweep(self): + with six.assertRaisesRegex( + self, ValueError, 'Invalid sweep_angle_axis'): + Geostationary(self.latitude_of_projection_origin, + 
self.longitude_of_projection_origin, + self.perspective_point_height, + 'a', + self.false_easting, + self.false_northing, + self.ellipsoid) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py index e225355d11..0e3460cd9f 100644 --- a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py +++ b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2014 - 2015, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. # @@ -27,71 +27,45 @@ from iris.coord_systems import GeogCS, VerticalPerspective -class Test_cartopy_crs(tests.IrisTest): +class Test(tests.IrisTest): def setUp(self): self.latitude_of_projection_origin = 0.0 self.longitude_of_projection_origin = 0.0 + self.perspective_point_height = 38204820000.0 + self.false_easting = 0.0 + self.false_northing = 0.0 + self.semi_major_axis = 6377563.396 self.semi_minor_axis = 6356256.909 - self.perspective_point_height = 38204820000.0 self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) + self.globe = ccrs.Globe(semimajor_axis=self.semi_major_axis, + semiminor_axis=self.semi_minor_axis, + ellipse=None) + + # Actual and expected coord system can be re-used for + # VerticalPerspective.test_crs_creation and test_projection_creation. 
+ self.expected = ccrs.NearsidePerspective( + central_longitude=self.longitude_of_projection_origin, + central_latitude=self.latitude_of_projection_origin, + satellite_height=self.perspective_point_height, + false_easting=self.false_easting, + false_northing=self.false_northing, + globe=self.globe) self.vp_cs = VerticalPerspective(self.latitude_of_projection_origin, self.longitude_of_projection_origin, self.perspective_point_height, - ellipsoid=self.ellipsoid) + self.false_easting, + self.false_northing, + self.ellipsoid) def test_crs_creation(self): res = self.vp_cs.as_cartopy_crs() - globe = ccrs.Globe(semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None) - expected = ccrs.Geostationary( - self.longitude_of_projection_origin, - self.perspective_point_height, - globe=globe) - self.assertEqual(res, expected) - - -class Test_cartopy_projection(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_projection_origin = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.perspective_point_height = 38204820000.0 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.vp_cs = VerticalPerspective(self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.perspective_point_height, - ellipsoid=self.ellipsoid) + self.assertEqual(res, self.expected) def test_projection_creation(self): res = self.vp_cs.as_cartopy_projection() - globe = ccrs.Globe(semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None) - expected = ccrs.Geostationary( - self.longitude_of_projection_origin, - self.perspective_point_height, - globe=globe) - self.assertEqual(res, expected) - - -class Test_non_zero_lat(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 22.0 - self.longitude_of_projection_origin = 11.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - 
self.perspective_point_height = 38204820000.0 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) + self.assertEqual(res, self.expected) - def test_lat(self): - with self.assertRaises(ValueError): - res = VerticalPerspective(self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.perspective_point_height, - ellipsoid=self.ellipsoid) if __name__ == '__main__': tests.main() diff --git a/lib/iris/tests/unit/coords/__init__.py b/lib/iris/tests/unit/coords/__init__.py index 9d5da3a543..376b77829f 100644 --- a/lib/iris/tests/unit/coords/__init__.py +++ b/lib/iris/tests/unit/coords/__init__.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2017, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. # @@ -27,14 +27,16 @@ import dask.array as da import numpy as np +import numpy.ma as ma from iris._lazy_data import is_lazy_data -def setup_test_arrays(self, shape): - # Create standard coordinate points and bounds test arrays, +def setup_test_arrays(self, shape, masked=False): + # Create concrete and lazy coordinate points and bounds test arrays, # given a desired coord shape. - # Also create lazy versions, and save all on the 'self' object. + # If masked=True, also add masked arrays with some or no masked data, + # for both points and bounds, lazy and real. n_pts = np.prod(shape) # Note: the values must be integral for testing integer dtypes. 
points = 10.0 * np.arange(n_pts, dtype=float).reshape(shape) @@ -45,6 +47,25 @@ def setup_test_arrays(self, shape): self.pts_lazy = da.from_array(points, points.shape) self.bds_real = bounds self.bds_lazy = da.from_array(bounds, bounds.shape) + if masked: + mpoints = ma.array(points) + self.no_masked_pts_real = mpoints + self.no_masked_pts_lazy = da.from_array(mpoints, mpoints.shape, + asarray=False) + mpoints = ma.array(mpoints, copy=True) + mpoints[0] = ma.masked + self.masked_pts_real = mpoints + self.masked_pts_lazy = da.from_array(mpoints, mpoints.shape, + asarray=False) + mbounds = ma.array(bounds) + self.no_masked_bds_real = mbounds + self.no_masked_bds_lazy = da.from_array(mbounds, mbounds.shape, + asarray=False) + mbounds = ma.array(mbounds, copy=True) + mbounds[0] = ma.masked + self.masked_bds_real = mbounds + self.masked_bds_lazy = da.from_array(mbounds, mbounds.shape, + asarray=False) def is_real_data(array): @@ -96,8 +117,8 @@ def coords_all_dtypes_and_lazynesses(self, coord_class): class CoordTestMixin(object): - def setupTestArrays(self, shape=(3,)): - setup_test_arrays(self, shape=shape) + def setupTestArrays(self, shape=(3,), masked=False): + setup_test_arrays(self, shape=shape, masked=masked) def assertArraysShareData(self, a1, a2, *args, **kwargs): # Check that two arrays are both real, same dtype, and based on the diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py index 3a5cf4151a..28af9f0318 100644 --- a/lib/iris/tests/unit/coords/test_AuxCoord.py +++ b/lib/iris/tests/unit/coords/test_AuxCoord.py @@ -30,6 +30,7 @@ import iris.tests as tests import numpy as np +import numpy.ma as ma from iris.tests.unit.coords import (CoordTestMixin, lazyness_string, @@ -42,15 +43,15 @@ class AuxCoordTestMixin(CoordTestMixin): # Define a 2-D default array shape. 
- def setupTestArrays(self, shape=(2, 3)): - super(AuxCoordTestMixin, self).setupTestArrays(shape) + def setupTestArrays(self, shape=(2, 3), masked=False): + super(AuxCoordTestMixin, self).setupTestArrays(shape, masked=masked) class Test__init__(tests.IrisTest, AuxCoordTestMixin): # Test for AuxCoord creation, with various combinations of points and # bounds = real / lazy / None. def setUp(self): - self.setupTestArrays() + self.setupTestArrays(masked=True) def test_lazyness_and_dtype_combinations(self): for (coord, points_type_name, bounds_type_name) in \ @@ -111,6 +112,82 @@ def test_fail_bounds_shape_mismatch(self): with self.assertRaisesRegexp(ValueError, msg): AuxCoord(self.pts_real, bounds=bds_wrong) + def test_no_masked_pts_real(self): + data = self.no_masked_pts_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertEqual(ma.count_masked(data), 0) + coord = AuxCoord(data) + self.assertFalse(coord.has_lazy_points()) + self.assertTrue(ma.isMaskedArray(coord.points)) + self.assertEqual(ma.count_masked(coord.points), 0) + + def test_no_masked_pts_lazy(self): + data = self.no_masked_pts_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertEqual(ma.count_masked(computed), 0) + coord = AuxCoord(data) + self.assertTrue(coord.has_lazy_points()) + self.assertTrue(ma.isMaskedArray(coord.points)) + self.assertEqual(ma.count_masked(coord.points), 0) + + def test_masked_pts_real(self): + data = self.masked_pts_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertTrue(ma.count_masked(data)) + coord = AuxCoord(data) + self.assertFalse(coord.has_lazy_points()) + self.assertTrue(ma.isMaskedArray(coord.points)) + self.assertTrue(ma.count_masked(coord.points)) + + def test_masked_pts_lazy(self): + data = self.masked_pts_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertTrue(ma.count_masked(computed)) + coord = AuxCoord(data) + self.assertTrue(coord.has_lazy_points()) + 
self.assertTrue(ma.isMaskedArray(coord.points)) + self.assertTrue(ma.count_masked(coord.points)) + + def test_no_masked_bds_real(self): + data = self.no_masked_bds_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertEqual(ma.count_masked(data), 0) + coord = AuxCoord(self.pts_real, bounds=data) + self.assertFalse(coord.has_lazy_bounds()) + self.assertTrue(ma.isMaskedArray(coord.bounds)) + self.assertEqual(ma.count_masked(coord.bounds), 0) + + def test_no_masked_bds_lazy(self): + data = self.no_masked_bds_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertEqual(ma.count_masked(computed), 0) + coord = AuxCoord(self.pts_real, bounds=data) + self.assertTrue(coord.has_lazy_bounds()) + self.assertTrue(ma.isMaskedArray(coord.bounds)) + self.assertEqual(ma.count_masked(coord.bounds), 0) + + def test_masked_bds_real(self): + data = self.masked_bds_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertTrue(ma.count_masked(data)) + coord = AuxCoord(self.pts_real, bounds=data) + self.assertFalse(coord.has_lazy_bounds()) + self.assertTrue(ma.isMaskedArray(coord.bounds)) + self.assertTrue(ma.count_masked(coord.bounds)) + + def test_masked_bds_lazy(self): + data = self.masked_bds_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertTrue(ma.count_masked(computed)) + coord = AuxCoord(self.pts_real, bounds=data) + self.assertTrue(coord.has_lazy_bounds()) + self.assertTrue(ma.isMaskedArray(coord.bounds)) + self.assertTrue(ma.count_masked(coord.bounds)) + class Test_core_points(tests.IrisTest, AuxCoordTestMixin): # Test for AuxCoord.core_points() with various types of points and bounds. 
diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index 109357aabc..bb839e86f9 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2015 - 2018, Met Office +# (C) British Crown Copyright 2015 - 2019, Met Office # # This file is part of Iris. # @@ -72,18 +72,6 @@ def test_set_data__lazy(self): self.measure.data = new_vals self.assertArrayEqual(self.measure.data, new_vals) - def test_set_data__int__lazy(self): - new_vals = as_lazy_data(np.array((1, 2, 3, 4), dtype=np.int32)) - exp_emsg = "Cannot create cell measure with lazy data of type int32" - with self.assertRaisesRegexp(ValueError, exp_emsg): - self.measure.data = new_vals - - def test_set_data__uint__lazy(self): - new_vals = as_lazy_data(np.array((1, 2, 3, 4), dtype=np.uint32)) - exp_emsg = "Cannot create cell measure with lazy data of type uint32" - with self.assertRaisesRegexp(ValueError, exp_emsg): - self.measure.data = new_vals - def test_data_different_shape(self): new_vals = np.array((1., 2., 3.)) msg = 'New data shape must match existing data shape.' @@ -133,5 +121,8 @@ def test__repr__(self): "var_name='area', attributes={'notes': '1m accuracy'})") self.assertEqual(self.measure.__repr__(), expected) + def test__eq__(self): + self.assertEqual(self.measure, self.measure) + if __name__ == '__main__': tests.main() diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py new file mode 100644 index 0000000000..133967717c --- /dev/null +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -0,0 +1,106 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. 
+# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Unit tests for the :class:`iris.coords.CellMethod`. +""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from iris._cube_coord_common import CFVariableMixin +from iris.coords import CellMethod, Coord + + +class Test(tests.IrisTest): + def setUp(self): + self.method = 'mean' + + def _check(self, token, coord, default=False): + result = CellMethod(self.method, coords=coord) + token = token if not default else CFVariableMixin._DEFAULT_NAME + expected = '{}: {}'.format(self.method, token) + self.assertEqual(str(result), expected) + + def test_coord_standard_name(self): + token = 'air_temperature' + coord = Coord(1, standard_name=token) + self._check(token, coord) + + def test_coord_long_name(self): + token = 'long_name' + coord = Coord(1, long_name=token) + self._check(token, coord) + + def test_coord_long_name_default(self): + token = 'long name' # includes space + coord = Coord(1, long_name=token) + self._check(token, coord, default=True) + + def test_coord_var_name(self): + token = 'var_name' + coord = Coord(1, var_name=token) + self._check(token, coord) + + def test_coord_var_name_fail(self): + token = 'var name' # includes space + emsg = 'is 
not a valid NetCDF variable name' + with self.assertRaisesRegexp(ValueError, emsg): + Coord(1, var_name=token) + + def test_coord_stash(self): + token = 'stash' + coord = Coord(1, attributes=dict(STASH=token)) + self._check(token, coord) + + def test_coord_stash_default(self): + token = '_stash' # includes leading underscore + coord = Coord(1, attributes=dict(STASH=token)) + self._check(token, coord, default=True) + + def test_string(self): + token = 'air_temperature' + result = CellMethod(self.method, coords=token) + expected = '{}: {}'.format(self.method, token) + self.assertEqual(str(result), expected) + + def test_string_default(self): + token = 'air temperature' # includes space + result = CellMethod(self.method, coords=token) + expected = '{}: unknown'.format(self.method) + self.assertEqual(str(result), expected) + + def test_mixture(self): + token = 'air_temperature' + coord = Coord(1, standard_name=token) + result = CellMethod(self.method, coords=[coord, token]) + expected = '{}: {}, {}'.format(self.method, token, token) + self.assertEqual(str(result), expected) + + def test_mixture_default(self): + token = 'air temperature' # includes space + coord = Coord(1, long_name=token) + result = CellMethod(self.method, coords=[coord, token]) + expected = '{}: unknown, unknown'.format(self.method, token, token) + self.assertEqual(str(result), expected) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index d42448d609..314d990984 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2018, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. # @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see . 
"""Unit tests for the :class:`iris.coords.Coord` class.""" - from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa @@ -24,10 +23,11 @@ import iris.tests as tests import collections -import mock import warnings +import dask.array as da import numpy as np +import six import iris from iris.coords import DimCoord, AuxCoord, Coord @@ -279,6 +279,13 @@ def test_dim_1d(self): self.assertArrayEqual(collapsed_coord.bounds, [[coord.bounds.min(), coord.bounds.max()]]) + def test_lazy_points(self): + # Lazy points should stay lazy after collapse. + coord = AuxCoord(points=da.from_array(np.arange(5), chunks=5)) + collapsed_coord = coord.collapsed() + self.assertTrue(collapsed_coord.has_lazy_bounds()) + self.assertTrue(collapsed_coord.has_lazy_points()) + def test_numeric_nd(self): coord = AuxCoord(points=np.array([[1, 2, 4, 5], [4, 5, 7, 8], @@ -303,9 +310,46 @@ def test_numeric_nd(self): [4, 10], [5, 11]])) - def test_lazy_nd_bounds(self): - import dask.array as da + def test_numeric_nd_bounds_all(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_real) + + collapsed_coord = coord.collapsed() + self.assertArrayEqual(collapsed_coord.points, np.array([55])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 112]])) + + def test_numeric_nd_bounds_second(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_real) + collapsed_coord = coord.collapsed(1) + self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 32], + [38, 72], + [78, 112]])) + def test_numeric_nd_bounds_first(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_real) + # ... and the other.. 
+ collapsed_coord = coord.collapsed(0) + self.assertArrayEqual( + collapsed_coord.points, np.array([40, 50, 60, 70])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 82], + [8, 92], + [18, 102], + [28, 112]])) + + def test_numeric_nd_bounds_last(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_real) + # ... and again with -ve dimension specification. + collapsed_coord = coord.collapsed(-1) + self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 32], + [38, 72], + [78, 112]])) + + def test_lazy_nd_bounds_all(self): self.setupTestArrays((3, 4)) coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) @@ -319,8 +363,39 @@ def test_lazy_nd_bounds(self): self.assertArrayEqual(collapsed_coord.points, np.array([55])) self.assertArrayEqual(collapsed_coord.bounds, da.array([[-2, 112]])) + def test_lazy_nd_bounds_second(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) + + collapsed_coord = coord.collapsed(1) + self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 32], + [38, 72], + [78, 112]])) + + def test_lazy_nd_bounds_first(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) + + collapsed_coord = coord.collapsed(0) + self.assertArrayEqual( + collapsed_coord.points, np.array([40, 50, 60, 70])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 82], + [8, 92], + [18, 102], + [28, 112]])) + + def test_lazy_nd_bounds_last(self): + self.setupTestArrays((3, 4)) + coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) + + collapsed_coord = coord.collapsed(-1) + self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) + self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 32], + [38, 72], + [78, 112]])) + def test_lazy_nd_points_and_bounds(self): - import 
dask.array as da self.setupTestArrays((3, 4)) coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy) @@ -713,5 +788,70 @@ def test_non_time_unit(self): self.assertEqual(expected, result) +class TestClimatology(tests.IrisTest): + # Variety of tests for the climatological property of a coord. + # Only using AuxCoord since there is no different behaviour between Aux + # and DimCoords for this property. + + def test_create(self): + coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]], + units='days since 1970-01-01', + climatological=True) + self.assertTrue(coord.climatological) + + def test_create_no_bounds_no_set(self): + with six.assertRaisesRegex(self, ValueError, + 'Cannot set.*no bounds exist'): + AuxCoord(points=[0, 1], units='days since 1970-01-01', + climatological=True) + + def test_create_no_time_no_set(self): + emsg = 'Cannot set climatological .* valid time reference units.*' + with six.assertRaisesRegex(self, TypeError, emsg): + AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]], + climatological=True) + + def test_absent(self): + coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]]) + self.assertFalse(coord.climatological) + + def test_absent_no_bounds_no_set(self): + coord = AuxCoord(points=[0, 1], units='days since 1970-01-01') + with six.assertRaisesRegex(self, ValueError, + 'Cannot set.*no bounds exist'): + coord.climatological = True + + def test_absent_no_time_no_set(self): + coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]]) + emsg = 'Cannot set climatological .* valid time reference units.*' + with six.assertRaisesRegex(self, TypeError, emsg): + coord.climatological = True + + def test_absent_no_bounds_unset(self): + coord = AuxCoord(points=[0, 1]) + coord.climatological = False + self.assertFalse(coord.climatological) + + def test_bounds_set(self): + coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]], + units='days since 1970-01-01') + coord.climatological = True + self.assertTrue(coord.climatological) + + def 
test_bounds_unset(self): + coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]], + units='days since 1970-01-01', + climatological=True) + coord.climatological = False + self.assertFalse(coord.climatological) + + def test_remove_bounds(self): + coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]], + units='days since 1970-01-01', + climatological=True) + coord.bounds = None + self.assertFalse(coord.climatological) + + if __name__ == '__main__': tests.main() diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py index c373a1145c..f6815dbe83 100644 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ b/lib/iris/tests/unit/coords/test_DimCoord.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2017, Met Office +# (C) British Crown Copyright 2017 - 2019, Met Office # # This file is part of Iris. # @@ -24,12 +24,14 @@ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa +import six # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests import numpy as np +import numpy.ma as ma from iris.tests.unit.coords import (CoordTestMixin, lazyness_string, @@ -40,15 +42,15 @@ class DimCoordTestMixin(CoordTestMixin): # Define a 1-D default array shape. - def setupTestArrays(self, shape=(3, )): - super(DimCoordTestMixin, self).setupTestArrays(shape) + def setupTestArrays(self, shape=(3, ), masked=False): + super(DimCoordTestMixin, self).setupTestArrays(shape, masked=masked) class Test__init__(tests.IrisTest, DimCoordTestMixin): # Test for DimCoord creation, with various combinations of points and # bounds = real / lazy / None. 
def setUp(self): - self.setupTestArrays() + self.setupTestArrays(masked=True) def test_lazyness_and_dtype_combinations(self): for (coord, points_type_name, bounds_type_name) in \ @@ -78,7 +80,7 @@ def test_fail_bounds_shape_mismatch(self): bds_shape = list(self.bds_real.shape) bds_shape[0] += 1 bds_wrong = np.zeros(bds_shape) - msg = 'The shape of the bounds array should be' + msg = "The shape of the 'unknown' DimCoord bounds array should be" with self.assertRaisesRegexp(ValueError, msg): DimCoord(self.pts_real, bounds=bds_wrong) @@ -87,6 +89,78 @@ def test_fail_nonmonotonic(self): with self.assertRaisesRegexp(ValueError, msg): DimCoord([1, 2, 0, 3]) + def test_no_masked_pts_real(self): + data = self.no_masked_pts_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertEqual(ma.count_masked(data), 0) + coord = DimCoord(data) + self.assertFalse(coord.has_lazy_points()) + self.assertFalse(ma.isMaskedArray(coord.points)) + self.assertEqual(ma.count_masked(coord.points), 0) + + def test_no_masked_pts_lazy(self): + data = self.no_masked_pts_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertEqual(ma.count_masked(computed), 0) + coord = DimCoord(data) + # DimCoord always realises its points. 
+ self.assertFalse(coord.has_lazy_points()) + self.assertFalse(ma.isMaskedArray(coord.points)) + + def test_masked_pts_real(self): + data = self.masked_pts_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertTrue(ma.count_masked(data)) + emsg = 'points array must not be masked' + with six.assertRaisesRegex(self, TypeError, emsg): + DimCoord(data) + + def test_masked_pts_lazy(self): + data = self.masked_pts_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertTrue(ma.count_masked(computed)) + emsg = 'points array must not be masked' + with six.assertRaisesRegex(self, TypeError, emsg): + DimCoord(data) + + def test_no_masked_bds_real(self): + data = self.no_masked_bds_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertEqual(ma.count_masked(data), 0) + coord = DimCoord(self.pts_real, bounds=data) + self.assertFalse(coord.has_lazy_bounds()) + self.assertFalse(ma.isMaskedArray(coord.bounds)) + self.assertEqual(ma.count_masked(coord.bounds), 0) + + def test_no_masked_bds_lazy(self): + data = self.no_masked_bds_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertEqual(ma.count_masked(computed), 0) + coord = DimCoord(self.pts_real, bounds=data) + # DimCoord always realises its bounds. 
+ self.assertFalse(coord.has_lazy_bounds()) + self.assertFalse(ma.isMaskedArray(coord.bounds)) + + def test_masked_bds_real(self): + data = self.masked_bds_real + self.assertTrue(ma.isMaskedArray(data)) + self.assertTrue(ma.count_masked(data)) + emsg = 'bounds array must not be masked' + with six.assertRaisesRegex(self, TypeError, emsg): + DimCoord(self.pts_real, bounds=data) + + def test_masked_bds_lazy(self): + data = self.masked_bds_lazy + computed = data.compute() + self.assertTrue(ma.isMaskedArray(computed)) + self.assertTrue(ma.count_masked(computed)) + emsg = 'bounds array must not be masked' + with six.assertRaisesRegex(self, TypeError, emsg): + DimCoord(self.pts_real, bounds=data) + class Test_core_points(tests.IrisTest, DimCoordTestMixin): # Test for DimCoord.core_points() with various types of points and bounds. @@ -452,7 +526,7 @@ def test_set_real(self): def test_fail_bad_shape(self): # Setting real points requires matching shape. coord = DimCoord(self.pts_real, bounds=self.bds_real) - msg = 'The shape of the bounds array should be' + msg = "The shape of the 'unknown' DimCoord bounds array should be" with self.assertRaisesRegexp(ValueError, msg): coord.bounds = np.array([1.0, 2.0, 3.0]) self.assertArrayEqual(coord.bounds, self.bds_real) diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 13fbc5660f..ea2c3c2dfe 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2018, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. 
# @@ -476,6 +476,7 @@ def setUp(self): self.mock_agg.aggregate = mock.Mock( return_value=mock.Mock(dtype='object')) self.mock_agg.aggregate_shape = mock.Mock(return_value=()) + self.mock_agg.lazy_func = None self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y, z: x) def test_2d_coord_simple_agg(self): @@ -549,6 +550,86 @@ def test_single_string_aggregation(self): AuxCoord(['a|a', 'a'], long_name='bar')) +class Test_aggregated_by__lazy(tests.IrisTest): + def setUp(self): + self.data = np.arange(44).reshape(4, 11) + self.lazydata = as_lazy_data(self.data) + self.cube = Cube(self.lazydata) + + val_coord = AuxCoord([0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], + long_name="val") + label_coord = AuxCoord(['alpha', 'alpha', 'beta', + 'beta', 'alpha', 'gamma', + 'alpha', 'alpha', 'alpha', + 'gamma', 'beta'], + long_name='label', units='no_unit') + simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name='simple_agg') + + self.label_mean = np.array( + [[4.+1./3., 5., 7.], + [15.+1./3., 16., 18.], + [26.+1./3., 27., 29.], + [37.+1./3., 38., 40.]]) + self.val_mean = np.array( + [[4.+1./6., 5.+2./3., 6.5], + [15.+1./6., 16.+2./3., 17.5], + [26.+1./6., 27.+2./3., 28.5], + [37.+1./6., 38.+2./3., 39.5]]) + + self.cube.add_aux_coord(simple_agg_coord, 0) + self.cube.add_aux_coord(val_coord, 1) + self.cube.add_aux_coord(label_coord, 1) + + def test_agg_by_label__lazy(self): + # Aggregate a cube on a string coordinate label where label + # and val entries are not in step; the resulting cube has a val + # coord of bounded cells and a label coord of single string entries. 
+ res_cube = self.cube.aggregated_by('label', MEAN) + val_coord = AuxCoord(np.array([1., 0.5, 1.]), + bounds=np.array([[0, 2], [0, 1], [2, 0]]), + long_name='val') + label_coord = AuxCoord(np.array(['alpha', 'beta', 'gamma']), + long_name='label', units='no_unit') + self.assertTrue(res_cube.has_lazy_data()) + self.assertEqual(res_cube.coord('val'), val_coord) + self.assertEqual(res_cube.coord('label'), label_coord) + self.assertArrayEqual(res_cube.data, self.label_mean) + self.assertFalse(res_cube.has_lazy_data()) + + def test_agg_by_val__lazy(self): + # Aggregate a cube on a numeric coordinate val where label + # and val entries are not in step; the resulting cube has a label + # coord with serialised labels from the aggregated cells. + res_cube = self.cube.aggregated_by('val', MEAN) + val_coord = AuxCoord(np.array([0, 1, 2]), long_name='val') + exp0 = 'alpha|alpha|beta|alpha|alpha|gamma' + exp1 = 'beta|alpha|beta' + exp2 = 'gamma|alpha' + label_coord = AuxCoord(np.array((exp0, exp1, exp2)), + long_name='label', units='no_unit') + self.assertTrue(res_cube.has_lazy_data()) + self.assertEqual(res_cube.coord('val'), val_coord) + self.assertEqual(res_cube.coord('label'), label_coord) + self.assertArrayEqual(res_cube.data, self.val_mean) + self.assertFalse(res_cube.has_lazy_data()) + + def test_single_string_aggregation__lazy(self): + aux_coords = [(AuxCoord(['a', 'b', 'a'], long_name='foo'), 0), + (AuxCoord(['a', 'a', 'a'], long_name='bar'), 0)] + cube = iris.cube.Cube(as_lazy_data(np.arange(12).reshape(3, 4)), + aux_coords_and_dims=aux_coords) + means = np.array( + [[4., 5., 6., 7.], + [4., 5., 6., 7.]]) + result = cube.aggregated_by('foo', MEAN) + self.assertTrue(result.has_lazy_data()) + self.assertEqual(result.shape, (2, 4)) + self.assertEqual(result.coord('bar'), + AuxCoord(['a|a', 'a'], long_name='bar')) + self.assertArrayEqual(result.data, means) + self.assertFalse(result.has_lazy_data()) + + class Test_rolling_window(tests.IrisTest): def setUp(self): self.cube 
= Cube(np.arange(6)) @@ -1475,9 +1556,7 @@ def test__masked_scalar_arraymask(self): self._check_copy(cube, cube.copy()) def test__lazy(self): - # Note: multiple chunks added as a workaround suggested to dask#3751, - # which is fixed in dask#3754. - cube = Cube(as_lazy_data(np.array([1, 0]), chunks=(1, 1))) + cube = Cube(as_lazy_data(np.array([1, 0]))) self._check_copy(cube, cube.copy()) @@ -1648,6 +1727,15 @@ def test_remove_cell_measure(self): self.assertEqual(self.cube._cell_measures_and_dims, [[self.b_cell_measure, (0, 1)]]) + def test_remove_cell_measure_by_name(self): + self.cube.remove_cell_measure('area') + self.assertEqual(self.cube._cell_measures_and_dims, + [[self.b_cell_measure, (0, 1)]]) + + def test_fail_remove_cell_measure_by_name(self): + with self.assertRaises(CellMeasureNotFoundError): + self.cube.remove_cell_measure('notarea') + class Test__getitem_CellMeasure(tests.IrisTest): def setUp(self): @@ -1775,6 +1863,29 @@ def test_bad_transpose_order(self): with self.assertRaisesRegexp(ValueError, exp_emsg): self.cube.transpose([1]) + def test_dim_coords(self): + x_coord = DimCoord(points=np.array([2, 3, 4]), + long_name='x') + self.cube.add_dim_coord(x_coord, 0) + self.cube.transpose() + self.assertEqual(self.cube._dim_coords_and_dims, [(x_coord, 2)]) + + def test_aux_coords(self): + x_coord = AuxCoord(points=np.array([[2, 3], [8, 4], [7, 9]]), + long_name='x') + self.cube.add_aux_coord(x_coord, (0, 1)) + self.cube.transpose() + self.assertEqual(self.cube._aux_coords_and_dims, + [(x_coord, (2, 1))]) + + def test_cell_measures(self): + area_cm = CellMeasure(data=np.arange(12).reshape(3, 4), + long_name='area of cells', measure='area') + self.cube.add_cell_measure(area_cm, (0, 2)) + self.cube.transpose() + self.assertEqual(self.cube._cell_measures_and_dims, + [(area_cm, (2, 0))]) + class Test_convert_units(tests.IrisTest): def test_convert_unknown_units(self): @@ -1794,5 +1905,51 @@ def test_preserves_lazy(self): self.assertArrayAllClose(cube.data, 
real_data_ft) +class Test__eq__data(tests.IrisTest): + """Partial cube equality testing, for data type only.""" + def test_data_float_eq(self): + cube1 = Cube([1.0]) + cube2 = Cube([1.0]) + self.assertTrue(cube1 == cube2) + + def test_data_float_eqtol(self): + val1 = np.array(1.0, dtype=np.float32) + # NOTE: Since v2.3, Iris uses "allclose". Prior to that we used + # "rtol=1e-8", and this example would *fail*. + val2 = np.array(1.0 + 1.e-6, dtype=np.float32) + cube1 = Cube([val1]) + cube2 = Cube([val2]) + self.assertNotEqual(val1, val2) + self.assertTrue(cube1 == cube2) + + def test_data_float_not_eq(self): + val1 = 1.0 + val2 = 1.0 + 1.e-4 + cube1 = Cube([1.0, val1]) + cube2 = Cube([1.0, val2]) + self.assertFalse(cube1 == cube2) + + def test_data_int_eq(self): + cube1 = Cube([1, 2, 3]) + cube2 = Cube([1, 2, 3]) + self.assertTrue(cube1 == cube2) + + def test_data_int_not_eq(self): + cube1 = Cube([1, 2, 3]) + cube2 = Cube([1, 2, 0]) + self.assertFalse(cube1 == cube2) + + # NOTE: since numpy v1.18, boolean array subtract is deprecated. + def test_data_bool_eq(self): + cube1 = Cube([True, False]) + cube2 = Cube([True, False]) + self.assertTrue(cube1 == cube2) + + def test_data_bool_not_eq(self): + cube1 = Cube([True, False]) + cube2 = Cube([True, True]) + self.assertFalse(cube1 == cube2) + + if __name__ == '__main__': tests.main() diff --git a/lib/iris/tests/unit/cube_coord_common/__init__.py b/lib/iris/tests/unit/cube_coord_common/__init__.py new file mode 100644 index 0000000000..ff307991e3 --- /dev/null +++ b/lib/iris/tests/unit/cube_coord_common/__init__.py @@ -0,0 +1,20 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +"""Unit tests for the :mod:`iris._cube_coord_common` module.""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa diff --git a/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py b/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py new file mode 100644 index 0000000000..9366bb848a --- /dev/null +++ b/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py @@ -0,0 +1,167 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Unit tests for the :class:`iris._cube_coord_common.CFVariableMixin`. +""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +from iris._cube_coord_common import CFVariableMixin + + +class Test_token(tests.IrisTest): + def test_passthru_None(self): + result = CFVariableMixin.token(None) + self.assertIsNone(result) + + def test_fail_leading_underscore(self): + result = CFVariableMixin.token('_nope') + self.assertIsNone(result) + + def test_fail_leading_dot(self): + result = CFVariableMixin.token('.nope') + self.assertIsNone(result) + + def test_fail_leading_plus(self): + result = CFVariableMixin.token('+nope') + self.assertIsNone(result) + + def test_fail_leading_at(self): + result = CFVariableMixin.token('@nope') + self.assertIsNone(result) + + def test_fail_space(self): + result = CFVariableMixin.token('nope nope') + self.assertIsNone(result) + + def test_fail_colon(self): + result = CFVariableMixin.token('nope:') + self.assertIsNone(result) + + def test_pass_simple(self): + token = 'simple' + result = CFVariableMixin.token(token) + self.assertEqual(result, token) + + def test_pass_leading_digit(self): + token = '123simple' + result = CFVariableMixin.token(token) + self.assertEqual(result, token) + + def test_pass_mixture(self): + token = 'S.imple@one+two_3' + result = CFVariableMixin.token(token) + self.assertEqual(result, token) + + +class Test_name(tests.IrisTest): + def setUp(self): + # None token CFVariableMixin + self.cf_var = CFVariableMixin() + self.cf_var.standard_name = None + self.cf_var.long_name = None + self.cf_var.var_name = None + self.cf_var.attributes = {} + self.default = CFVariableMixin._DEFAULT_NAME + # bad token CFVariableMixin + self.cf_bad = CFVariableMixin() + self.cf_bad.standard_name = None + self.cf_bad.long_name = 'nope nope' + self.cf_bad.var_name = None + self.cf_bad.attributes = {'STASH': 'nope nope'} + + def test_standard_name(self): + token = 'air_temperature' + self.cf_var.standard_name = token + result = self.cf_var.name() + self.assertEqual(result, token) + + def test_long_name(self): + token = 'long_name' + 
self.cf_var.long_name = token + result = self.cf_var.name() + self.assertEqual(result, token) + + def test_var_name(self): + token = 'var_name' + self.cf_var.var_name = token + result = self.cf_var.name() + self.assertEqual(result, token) + + def test_stash(self): + token = 'stash' + self.cf_var.attributes['STASH'] = token + result = self.cf_var.name() + self.assertEqual(result, token) + + def test_default(self): + result = self.cf_var.name() + self.assertEqual(result, self.default) + + def test_token_long_name(self): + token = 'long_name' + self.cf_bad.long_name = token + result = self.cf_bad.name(token=True) + self.assertEqual(result, token) + + def test_token_var_name(self): + token = 'var_name' + self.cf_bad.var_name = token + result = self.cf_bad.name(token=True) + self.assertEqual(result, token) + + def test_token_stash(self): + token = 'stash' + self.cf_bad.attributes['STASH'] = token + result = self.cf_bad.name(token=True) + self.assertEqual(result, token) + + def test_token_default(self): + result = self.cf_var.name(token=True) + self.assertEqual(result, self.default) + + def test_fail_token_default(self): + emsg = 'Cannot retrieve a valid name token' + with self.assertRaisesRegexp(ValueError, emsg): + self.cf_var.name(default='_nope', token=True) + + +class Test_standard_name__setter(tests.IrisTest): + def test_valid_standard_name(self): + cf_var = CFVariableMixin() + cf_var.standard_name = 'air_temperature' + self.assertEqual(cf_var.standard_name, 'air_temperature') + + def test_invalid_standard_name(self): + cf_var = CFVariableMixin() + emsg = "'not_a_standard_name' is not a valid standard_name" + with self.assertRaisesRegexp(ValueError, emsg): + cf_var.standard_name = 'not_a_standard_name' + + def test_none_standard_name(self): + cf_var = CFVariableMixin() + cf_var.standard_name = None + self.assertIsNone(cf_var.standard_name) + + +if __name__ == '__main__': + tests.main() diff --git 
a/lib/iris/tests/unit/cube_coord_common/test_get_valid_standard_name.py b/lib/iris/tests/unit/cube_coord_common/test_get_valid_standard_name.py new file mode 100644 index 0000000000..6e50aff2d9 --- /dev/null +++ b/lib/iris/tests/unit/cube_coord_common/test_get_valid_standard_name.py @@ -0,0 +1,70 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Unit tests for the :func:`iris._cube_coord_common.get_valid_standard_name`. + +""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +from iris._cube_coord_common import get_valid_standard_name + + +class Test(tests.IrisTest): + def setUp(self): + self.emsg = "'{}' is not a valid standard_name" + + def test_valid_standard_name(self): + name = 'air_temperature' + self.assertEqual(get_valid_standard_name(name), name) + + def test_invalid_standard_name(self): + name = 'not_a_standard_name' + with self.assertRaisesRegexp(ValueError, self.emsg.format(name)): + get_valid_standard_name(name) + + def test_valid_standard_name_valid_modifier(self): + name = 'air_temperature standard_error' + self.assertEqual(get_valid_standard_name(name), name) + + def test_valid_standard_name_valid_modifier_extra_spaces(self): + name = 'air_temperature standard_error' + self.assertEqual(get_valid_standard_name(name), name) + + def test_invalid_standard_name_valid_modifier(self): + name = 'not_a_standard_name standard_error' + with self.assertRaisesRegexp(ValueError, self.emsg.format(name)): + get_valid_standard_name(name) + + def test_valid_standard_invalid_name_modifier(self): + name = 'air_temperature extra_names standard_error' + with self.assertRaisesRegexp(ValueError, self.emsg.format(name)): + get_valid_standard_name(name) + + def test_valid_standard_valid_name_modifier_extra_names(self): + name = 'air_temperature standard_error extra words' + with self.assertRaisesRegexp(ValueError, self.emsg.format(name)): + get_valid_standard_name(name) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py new file mode 100644 index 0000000000..a9cf07ef9b --- /dev/null +++ b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py @@ -0,0 +1,83 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. 
+# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +"""Unit tests for the `iris.cube.CubeRepresentation` class.""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from iris.cube import CubeList +import iris.tests.stock as stock + +from iris.experimental.representation import CubeListRepresentation + + +@tests.skip_data +class Test__instantiation(tests.IrisTest): + def setUp(self): + self.cubes = CubeList([stock.simple_3d()]) + self.representer = CubeListRepresentation(self.cubes) + + def test_ids(self): + self.assertEqual(id(self.cubes), self.representer.cubelist_id) + + +@tests.skip_data +class Test_make_content(tests.IrisTest): + def setUp(self): + self.cubes = CubeList([stock.simple_3d(), + stock.lat_lon_cube()]) + self.representer = CubeListRepresentation(self.cubes) + self.content = self.representer.make_content() + + def test_repr_len(self): + self.assertEqual(len(self.cubes), len(self.content)) + + def test_summary_lines(self): + names = [c.name() for c in self.cubes] + for name, content in zip(names, self.content): + self.assertIn(name, content) + + def test__cube_name_summary_consistency(self): + # Just check the first cube in the CubeList. 
+ single_cube_html = self.content[0] + first_contents_line = single_cube_html.split('\n')[1] + # Get the cube name out of the repr html... + cube_name = first_contents_line.split('>0: ')[1].split('/')[0] + # ... and prettify it (to be the same as in the following cube repr). + pretty_cube_name = cube_name.strip().replace('_', ' ').title() + self.assertIn(pretty_cube_name, single_cube_html) + + +@tests.skip_data +class Test_repr_html(tests.IrisTest): + def setUp(self): + self.cubes = CubeList([stock.simple_3d(), + stock.lat_lon_cube()]) + self.representer = CubeListRepresentation(self.cubes) + + def test_html_length(self): + html = self.representer.repr_html() + n_html_elems = html.count(' tag per cube. + self.assertEqual(len(self.cubes), n_html_elems) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py index c94742a49a..fa05c00abf 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2017 - 2018, Met Office +# (C) British Crown Copyright 2017 - 2019, Met Office # # This file is part of Iris. # @@ -290,6 +290,21 @@ def test__attribute_row(self): self.assertIn(colspan_str, row_str) +@tests.skip_data +class Test__expand_last_cell(tests.IrisTest): + def setUp(self): + self.cube = stock.simple_3d() + self.representer = CubeRepresentation(self.cube) + self.representer._get_bits(self.representer._get_lines()) + col_span = self.representer.ndims + self.row = self.representer._make_row('title', body='first', + col_span=col_span) + + def test_add_line(self): + cell = self.representer._expand_last_cell(self.row[-2], 'second') + self.assertIn('first
    second', cell) + + @tests.skip_data class Test__make_content(tests.IrisTest): def setUp(self): @@ -312,6 +327,14 @@ def test_not_included(self): for heading in not_included: self.assertNotIn(heading, self.result) + def test_handle_newline(self): + cube = self.cube + cube.attributes['lines'] = 'first\nsecond' + representer = CubeRepresentation(cube) + representer._get_bits(representer._get_lines()) + result = representer._make_content() + self.assertIn('first
    second', result) + @tests.skip_data class Test_repr_html(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py index 76a5c13861..0287d1d7b7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2013 - 2018, Met Office +# (C) British Crown Copyright 2013 - 2019, Met Office # # This file is part of Iris. # @@ -35,7 +35,8 @@ from iris.coord_systems import (GeogCS, TransverseMercator, RotatedGeogCS, LambertConformal, Mercator, Stereographic, LambertAzimuthalEqualArea, - AlbersEqualArea) + AlbersEqualArea, VerticalPerspective, + Geostationary) from iris.coords import DimCoord from iris.cube import Cube from iris.fileformats.netcdf import Saver @@ -44,6 +45,10 @@ class Test_write(tests.IrisTest): + # ------------------------------------------------------------------------- + # It is not considered necessary to have integration tests for saving + # EVERY coordinate system. A subset are tested below. 
+ # ------------------------------------------------------------------------- def _transverse_mercator_cube(self, ellipsoid=None): data = np.arange(12).reshape(3, 4) cube = Cube(data, 'air_pressure_anomaly') @@ -162,16 +167,16 @@ def test_big_endian(self): def test_zlib(self): cube = self._simple_cube('>f4') - with mock.patch('iris.fileformats.netcdf.netCDF4') as api: - with Saver('/dummy/path', 'NETCDF4') as saver: - saver.write(cube, zlib=True) + api = self.patch('iris.fileformats.netcdf.netCDF4') + with Saver('/dummy/path', 'NETCDF4') as saver: + saver.write(cube, zlib=True) dataset = api.Dataset.return_value - create_var_calls = mock.call.createVariable( + create_var_call = mock.call( 'air_pressure_anomaly', np.dtype('float32'), ['dim0', 'dim1'], fill_value=None, shuffle=True, least_significant_digit=None, contiguous=False, zlib=True, fletcher32=False, - endian='native', complevel=4, chunksizes=None).call_list() - dataset.assert_has_calls(create_var_calls) + endian='native', complevel=4, chunksizes=None) + self.assertIn(create_var_call, dataset.createVariable.call_args_list) def test_least_significant_digit(self): cube = Cube(np.array([1.23, 4.56, 7.89]), @@ -247,6 +252,66 @@ def test_reserved_attributes(self): ds.close() self.assertEqual(res, 'something something_else') + def test_with_climatology(self): + cube = stock.climatology_3d() + with self.temp_filename('.nc') as nc_path: + with Saver(nc_path, 'NETCDF4') as saver: + saver.write(cube) + self.assertCDL(nc_path) + + +class Test__create_cf_bounds(tests.IrisTest): + def _check_bounds_setting(self, climatological=False): + # Generic test that can run with or without a climatological coord. + cube = stock.climatology_3d() + coord = cube.coord('time').copy() + # Over-write original value from stock.climatology_3d with test value. + coord.climatological = \ + climatological + + # Set up expected strings. 
+ if climatological: + property_name = 'climatology' + varname_extra = 'climatology' + else: + property_name = 'bounds' + varname_extra = 'bnds' + boundsvar_name = 'time_' + varname_extra + + # Set up arguments for testing _create_cf_bounds. + saver = mock.MagicMock(spec=Saver) + # NOTE: 'saver' must have spec=Saver to fake isinstance(save, Saver), + # so it can pass as 'self' in the call to _create_cf_cbounds. + # Mock a '_dataset' property; not automatic because 'spec=Saver'. + saver._dataset = mock.MagicMock() + # Mock the '_ensure_valid_dtype' method to return an object with a + # suitable 'shape' and 'dtype'. + saver._ensure_valid_dtype.return_value = mock.Mock( + shape=coord.bounds.shape, dtype=coord.bounds.dtype) + var = mock.MagicMock(spec=nc.Variable) + + # Make the main call. + Saver._create_cf_bounds(saver, coord, var, 'time') + + # Test the call of _setncattr in _create_cf_bounds. + setncattr_call = mock.call(property_name, + boundsvar_name.encode(encoding='ascii')) + self.assertEqual(setncattr_call, var.setncattr.call_args) + + # Test the call of createVariable in _create_cf_bounds. 
+ dataset = saver._dataset + expected_dimensions = var.dimensions + ('bnds',) + create_var_call = mock.call( + boundsvar_name, coord.bounds.dtype, + expected_dimensions) + self.assertEqual(create_var_call, dataset.createVariable.call_args) + + def test_set_bounds_default(self): + self._check_bounds_setting(climatological=False) + + def test_set_bounds_climatology(self): + self._check_bounds_setting(climatological=True) + class Test_write__valid_x_cube_attributes(tests.IrisTest): """Testing valid_range, valid_min and valid_max attributes.""" @@ -806,6 +871,71 @@ def test_aea_cs(self): } self._test(coord_system, expected) + def test_vp_cs(self): + latitude_of_projection_origin = 1.0 + longitude_of_projection_origin = 2.0 + perspective_point_height = 2000000.0 + false_easting = 100.0 + false_northing = 200.0 + + semi_major_axis = 6377563.396 + semi_minor_axis = 6356256.909 + ellipsoid = GeogCS(semi_major_axis, semi_minor_axis) + + coord_system = VerticalPerspective( + latitude_of_projection_origin=latitude_of_projection_origin, + longitude_of_projection_origin=longitude_of_projection_origin, + perspective_point_height=perspective_point_height, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=ellipsoid) + expected = { + 'grid_mapping_name': b'vertical_perspective', + 'latitude_of_projection_origin': latitude_of_projection_origin, + 'longitude_of_projection_origin': longitude_of_projection_origin, + 'perspective_point_height': perspective_point_height, + 'false_easting': false_easting, + 'false_northing': false_northing, + 'semi_major_axis': semi_major_axis, + 'semi_minor_axis': semi_minor_axis, + 'longitude_of_prime_meridian': 0, + } + self._test(coord_system, expected) + + def test_geo_cs(self): + latitude_of_projection_origin = 0.0 + longitude_of_projection_origin = 2.0 + perspective_point_height = 2000000.0 + sweep_angle_axis = 'x' + false_easting = 100.0 + false_northing = 200.0 + + semi_major_axis = 6377563.396 + semi_minor_axis = 
6356256.909 + ellipsoid = GeogCS(semi_major_axis, semi_minor_axis) + + coord_system = Geostationary( + latitude_of_projection_origin=latitude_of_projection_origin, + longitude_of_projection_origin=longitude_of_projection_origin, + perspective_point_height=perspective_point_height, + sweep_angle_axis=sweep_angle_axis, + false_easting=false_easting, + false_northing=false_northing, + ellipsoid=ellipsoid) + expected = { + 'grid_mapping_name': b'geostationary', + 'latitude_of_projection_origin': latitude_of_projection_origin, + 'longitude_of_projection_origin': longitude_of_projection_origin, + 'perspective_point_height': perspective_point_height, + 'sweep_angle_axis': sweep_angle_axis, + 'false_easting': false_easting, + 'false_northing': false_northing, + 'semi_major_axis': semi_major_axis, + 'semi_minor_axis': semi_minor_axis, + 'longitude_of_prime_meridian': 0, + } + self._test(coord_system, expected) + class Test__create_cf_cell_measure_variable(tests.IrisTest): # Saving of masked data is disallowed. diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py index 6a1aeb9d61..ed951c4bf9 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2018, Met Office +# (C) British Crown Copyright 2019, Met Office # # This file is part of Iris. 
# @@ -26,7 +26,7 @@ from dask.array import Array as dask_array import numpy as np -from iris._lazy_data import _limited_shape +from iris._lazy_data import _optimum_chunksize import iris.fileformats.cf from iris.fileformats.netcdf import _get_cf_var_data from iris.tests import mock @@ -35,8 +35,8 @@ class Test__get_cf_var_data(tests.IrisTest): def setUp(self): self.filename = 'DUMMY' - self.shape = (3, 240, 200) - self.expected_chunks = _limited_shape(self.shape) + self.shape = (300000, 240, 200) + self.expected_chunks = _optimum_chunksize(self.shape, self.shape) def _make(self, chunksizes): cf_data = mock.Mock(_FillValue=None) @@ -55,15 +55,16 @@ def test_cf_data_type(self): self.assertIsInstance(lazy_data, dask_array) def test_cf_data_chunks(self): - chunks = [1, 12, 100] + chunks = [2500, 240, 200] cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var, self.filename) lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(chunks, lazy_data_chunks) + expected_chunks = _optimum_chunksize(chunks, self.shape) + self.assertArrayEqual(lazy_data_chunks, expected_chunks) def test_cf_data_no_chunks(self): # No chunks means chunks are calculated from the array's shape by - # `iris._lazy_data._limited_shape()`. + # `iris._lazy_data._optimum_chunksize()`. 
chunks = None cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var, self.filename) diff --git a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py index d9816c64b9..94abb974cd 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py +++ b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py @@ -64,7 +64,7 @@ def test_deferred_bytes(self): field = mock.Mock(core_data=core_data) data_shape = (100, 120) proxy = mock.Mock(dtype=np.dtype('f4'), shape=data_shape, - spec=pp.PPDataProxy) + spec=pp.PPDataProxy, ndim=len(data_shape)) # We can't directly inspect the concrete data source underlying # the dask array, so instead we patch the proxy creation and check it's # being created and invoked correctly. diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py index 4bb653f23f..4df1464a0e 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2014 - 2018, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. # @@ -36,15 +36,23 @@ from iris.tests import mock +# from iris.tests.unit.fileformats.pyke_rules.compiled_krb\ +# .fc_rules_cf_fc.test_build_dimension_coordinate import RulesTestMixin + class TestBoundsVertexDim(tests.IrisTest): + # Lookup for various tests (which change the dimension order). + dim_names_lens = { + 'foo': 2, 'bar': 3, 'nv': 4, + # 'x' and 'y' used as aliases for 'foo' and 'bar' + 'x': 2, 'y': 3} + def setUp(self): # Create coordinate cf variables and pyke engine. 
- points = np.arange(6).reshape(2, 3) - - cf_data = self._make_cf_data(points) + dimension_names = ('foo', 'bar') + points, cf_data = self._make_array_and_cf_data(dimension_names) self.cf_coord_var = mock.Mock( spec=CFVariable, - dimensions=('foo', 'bar'), + dimensions=dimension_names, cf_name='wibble', cf_data=cf_data, standard_name=None, @@ -54,6 +62,15 @@ def setUp(self): dtype=points.dtype, __getitem__=lambda self, key: points[key]) + expected_bounds, _ = self._make_array_and_cf_data( + dimension_names=('foo', 'bar', 'nv')) + self.expected_coord = AuxCoord( + self.cf_coord_var[:], + long_name=self.cf_coord_var.long_name, + var_name=self.cf_coord_var.cf_name, + units=self.cf_coord_var.units, + bounds=expected_bounds) + self.engine = mock.Mock( cube=mock.Mock(), cf_var=mock.Mock(dimensions=('foo', 'bar'), @@ -61,9 +78,8 @@ def setUp(self): filename='DUMMY', provides=dict(coordinates=[])) - # Create patch for deferred loading that prevents attempted - # file access. This assumes that self.cf_bounds_var is - # defined in the test case. + # Patch the deferred loading that prevents attempted file access. + # This assumes that self.cf_bounds_var is defined in the test case. def patched__getitem__(proxy_self, keys): variable = None for var in (self.cf_coord_var, self.cf_bounds_var): @@ -71,135 +87,71 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.deferred_load_patch = mock.patch( - 'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', + new=patched__getitem__) - @staticmethod - def _make_cf_data(vals): + # Patch the helper function that retrieves the bounds cf variable, + # and a False flag for climatological. + # This avoids the need for setting up further mocking of cf objects. + def _get_per_test_bounds_var(_coord_unused): + # Return the 'cf_bounds_var' created by the current test. 
+ return (self.cf_bounds_var, False) + + self.patch('iris.fileformats._pyke_rules.compiled_krb.' + 'fc_rules_cf_fc.get_cf_bounds_var', + new=_get_per_test_bounds_var) + + @classmethod + def _make_array_and_cf_data(cls, dimension_names): + shape = tuple(cls.dim_names_lens[name] + for name in dimension_names) cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=vals.shape) - return cf_data + cf_data.chunking = mock.MagicMock(return_value=shape) + return np.zeros(shape), cf_data - def test_slowest_varying_vertex_dim(self): + def _make_cf_bounds_var(self, dimension_names): # Create the bounds cf variable. - bounds = np.arange(24).reshape(4, 2, 3) - cf_data = self._make_cf_data(bounds) - self.cf_bounds_var = mock.Mock( + bounds, cf_data = self._make_array_and_cf_data(dimension_names) + cf_bounds_var = mock.Mock( spec=CFVariable, - dimensions=('nv', 'foo', 'bar'), + dimensions=dimension_names, cf_name='wibble_bnds', cf_data=cf_data, shape=bounds.shape, dtype=bounds.dtype, __getitem__=lambda self, key: bounds[key]) - # Expected bounds on the resulting coordinate should be rolled so that - # the vertex dimension is at the end. - expected_bounds = np.rollaxis(bounds, 0, bounds.ndim) - expected_coord = AuxCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=expected_bounds) + return bounds, cf_bounds_var - # Patch the helper function that retrieves the bounds cf variable. - # This avoids the need for setting up further mocking of cf objects. - get_cf_bounds_var_patch = mock.patch( - 'iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - return_value=self.cf_bounds_var) + def _check_case(self, dimension_names): + bounds, self.cf_bounds_var = self._make_cf_bounds_var( + dimension_names=dimension_names) # Asserts must lie within context manager because of deferred loading. 
- with self.deferred_load_patch, get_cf_bounds_var_patch: - build_auxiliary_coordinate(self.engine, self.cf_coord_var) + build_auxiliary_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0, 1]) + # Test that expected coord is built and added to cube. + self.engine.cube.add_aux_coord.assert_called_with( + self.expected_coord, [0, 1]) - # Test that engine.provides container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], - expected_list) + # Test that engine.provides container is correctly populated. + expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] + self.assertEqual(self.engine.provides['coordinates'], + expected_list) def test_fastest_varying_vertex_dim(self): - bounds = np.arange(24).reshape(2, 3, 4) - cf_data = self._make_cf_data(bounds) - self.cf_bounds_var = mock.Mock( - spec=CFVariable, - dimensions=('foo', 'bar', 'nv'), - cf_name='wibble_bnds', - cf_data=cf_data, - shape=bounds.shape, - dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key]) - - expected_coord = AuxCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=bounds) - - get_cf_bounds_var_patch = mock.patch( - 'iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - return_value=self.cf_bounds_var) - - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, get_cf_bounds_var_patch: - build_auxiliary_coordinate(self.engine, self.cf_coord_var) + # The usual order. + self._check_case(dimension_names=('foo', 'bar', 'nv')) - # Test that expected coord is built and added to cube. 
- self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0, 1]) - - # Test that engine.provides container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], - expected_list) + def test_slowest_varying_vertex_dim(self): + # Bounds in the first (slowest varying) dimension. + self._check_case(dimension_names=('nv', 'foo', 'bar')) def test_fastest_with_different_dim_names(self): # Despite the dimension names ('x', and 'y') differing from the coord's # which are 'foo' and 'bar' (as permitted by the cf spec), # this should still work because the vertex dim is the fastest varying. - bounds = np.arange(24).reshape(2, 3, 4) - cf_data = self._make_cf_data(bounds) - self.cf_bounds_var = mock.Mock( - spec=CFVariable, - dimensions=('x', 'y', 'nv'), - cf_name='wibble_bnds', - cf_data=cf_data, - shape=bounds.shape, - dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key]) - - expected_coord = AuxCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=bounds) - - get_cf_bounds_var_patch = mock.patch( - 'iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - return_value=self.cf_bounds_var) - - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, get_cf_bounds_var_patch: - build_auxiliary_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0, 1]) - - # Test that engine.provides container is correctly populated. 
- expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], - expected_list) + self._check_case(dimension_names=('x', 'y', 'nv')) class TestDtype(tests.IrisTest): @@ -265,5 +217,90 @@ def test_add_offset_float(self): self.assertEqual(coord.dtype.kind, 'f') +class TestCoordConstruction(tests.IrisTest): + def setUp(self): + # Create dummy pyke engine. + self.engine = mock.Mock( + cube=mock.Mock(), + cf_var=mock.Mock(dimensions=('foo', 'bar')), + filename='DUMMY', + provides=dict(coordinates=[])) + + points = np.arange(6) + self.cf_coord_var = mock.Mock( + dimensions=('foo',), + scale_factor=1, + add_offset=0, + cf_name='wibble', + cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), + standard_name=None, + long_name='wibble', + units='days since 1970-01-01', + calendar=None, + shape=points.shape, + dtype=points.dtype, + __getitem__=lambda self, key: points[key]) + + bounds = np.arange(12).reshape(6, 2) + self.cf_bounds_var = mock.Mock( + dimensions=('x', 'nv'), + scale_factor=1, + add_offset=0, + cf_name='wibble_bnds', + cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), + shape=bounds.shape, + dtype=bounds.dtype, + __getitem__=lambda self, key: bounds[key]) + self.bounds = bounds + + # Create patch for deferred loading that prevents attempted + # file access. This assumes that self.cf_coord_var and + # self.cf_bounds_var are defined in the test case. + def patched__getitem__(proxy_self, keys): + for var in (self.cf_coord_var, self.cf_bounds_var): + if proxy_self.variable_name == var.cf_name: + return var[keys] + raise RuntimeError() + + self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', + new=patched__getitem__) + + # Patch the helper function that retrieves the bounds cf variable. + # This avoids the need for setting up further mocking of cf objects. + self.use_climatology_bounds = False # Set this when you need to. 
+ + def get_cf_bounds_var(coord_var): + return self.cf_bounds_var, self.use_climatology_bounds + + self.patch('iris.fileformats._pyke_rules.compiled_krb.' + 'fc_rules_cf_fc.get_cf_bounds_var', + new=get_cf_bounds_var) + + def check_case_aux_coord_construction(self, climatology=False): + # Test a generic auxiliary coordinate, with or without + # a climatological coord. + self.use_climatology_bounds = climatology + + expected_coord = AuxCoord( + self.cf_coord_var[:], + long_name=self.cf_coord_var.long_name, + var_name=self.cf_coord_var.cf_name, + units=self.cf_coord_var.units, + bounds=self.bounds, + climatological=climatology) + + build_auxiliary_coordinate(self.engine, self.cf_coord_var) + + # Test that expected coord is built and added to cube. + self.engine.cube.add_aux_coord.assert_called_with( + expected_coord, [0]) + + def test_aux_coord_construction(self): + self.check_case_aux_coord_construction(climatology=False) + + def test_aux_coord_construction__climatology(self): + self.check_case_aux_coord_construction(climatology=True) + + if __name__ == '__main__': tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py index 5c2892cea7..39d46ba88a 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2014 - 2015, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. 
# @@ -35,33 +35,33 @@ from iris.tests import mock -class TestInvalidGlobalAttributes(tests.IrisTest): - @staticmethod - def _make_engine(global_attributes=None): - if global_attributes is None: - global_attributes = {} +def _make_engine(global_attributes=None, standard_name=None, long_name=None): + if global_attributes is None: + global_attributes = {} + + cf_group = mock.Mock(global_attributes=global_attributes) - cf_group = mock.Mock(global_attributes=global_attributes) + cf_var = mock.Mock( + cf_name='wibble', + standard_name=standard_name, + long_name=long_name, + units='m', + dtype=np.float64, + cell_methods=None, + cf_group=cf_group) - cf_var = mock.Mock( - cf_name='wibble', - standard_name=None, - long_name=None, - units='m', - dtype=np.float64, - cell_methods=None, - cf_group=cf_group) + engine = mock.Mock( + cube=Cube([23]), + cf_var=cf_var) - engine = mock.Mock( - cube=Cube([23]), - cf_var=cf_var) + return engine - return engine +class TestInvalidGlobalAttributes(tests.IrisTest): def test_valid(self): global_attributes = {'Conventions': 'CF-1.5', 'comment': 'Mocked test object'} - engine = self._make_engine(global_attributes) + engine = _make_engine(global_attributes) build_cube_metadata(engine) expected = global_attributes self.assertEqual(engine.cube.attributes, expected) @@ -70,7 +70,7 @@ def test_invalid(self): global_attributes = {'Conventions': 'CF-1.5', 'comment': 'Mocked test object', 'calendar': 'standard'} - engine = self._make_engine(global_attributes) + engine = _make_engine(global_attributes) with mock.patch('warnings.warn') as warn: build_cube_metadata(engine) # Check for a warning. @@ -84,5 +84,50 @@ def test_invalid(self): self.assertEqual(engine.cube.attributes, expected) +class TestCubeName(tests.IrisTest): + def check_cube_names(self, inputs, expected): + # Inputs - attributes on the fake CF Variable. + standard_name, long_name = inputs + # Expected - The expected cube attributes. 
+ exp_standard_name, exp_long_name = expected + + engine = _make_engine(standard_name=standard_name, long_name=long_name) + build_cube_metadata(engine) + + # Check the cube's standard name and long name are as expected. + self.assertEqual(engine.cube.standard_name, exp_standard_name) + self.assertEqual(engine.cube.long_name, exp_long_name) + + def test_standard_name_none_long_name_none(self): + inputs = (None, None) + expected = (None, None) + self.check_cube_names(inputs, expected) + + def test_standard_name_none_long_name_set(self): + inputs = (None, 'ice_thickness_long_name') + expected = (None, 'ice_thickness_long_name') + self.check_cube_names(inputs, expected) + + def test_standard_name_valid_long_name_none(self): + inputs = ('sea_ice_thickness', None) + expected = ('sea_ice_thickness', None) + self.check_cube_names(inputs, expected) + + def test_standard_name_valid_long_name_set(self): + inputs = ('sea_ice_thickness', 'ice_thickness_long_name') + expected = ('sea_ice_thickness', 'ice_thickness_long_name') + self.check_cube_names(inputs, expected) + + def test_standard_name_invalid_long_name_none(self): + inputs = ('not_a_standard_name', None) + expected = (None, 'not_a_standard_name',) + self.check_cube_names(inputs, expected) + + def test_standard_name_invalid_long_name_set(self): + inputs = ('not_a_standard_name', 'ice_thickness_long_name') + expected = (None, 'ice_thickness_long_name') + self.check_cube_names(inputs, expected) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py index 38239ea9a0..ed6111dc36 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py @@ -1,4 
+1,4 @@ -# (C) British Crown Copyright 2014 - 2018, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. # @@ -61,8 +61,10 @@ def patched__getitem__(proxy_self, keys): # Patch the helper function that retrieves the bounds cf variable. # This avoids the need for setting up further mocking of cf objects. + self.use_climatology_bounds = False # Set this when you need to. + def get_cf_bounds_var(coord_var): - return self.cf_bounds_var + return self.cf_bounds_var, self.use_climatology_bounds self.get_cf_bounds_var_patch = mock.patch( 'iris.fileformats._pyke_rules.compiled_krb.' @@ -89,12 +91,16 @@ def _set_cf_coord_var(self, points): cf_name='wibble', standard_name=None, long_name='wibble', - units='m', + units='days since 1970-01-01', + calendar=None, shape=points.shape, dtype=points.dtype, __getitem__=lambda self, key: points[key]) - def test_dim_coord_construction(self): + def check_case_dim_coord_construction(self, climatology=False): + # Test a generic dimension coordinate, with or without + # a climatological coord. + self.use_climatology_bounds = climatology self._set_cf_coord_var(np.arange(6)) expected_coord = DimCoord( @@ -102,7 +108,8 @@ def test_dim_coord_construction(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + climatological=climatology) # Asserts must lie within context manager because of deferred loading. 
with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -112,6 +119,12 @@ def test_dim_coord_construction(self): self.engine.cube.add_dim_coord.assert_called_with( expected_coord, [0]) + def test_dim_coord_construction(self): + self.check_case_dim_coord_construction(climatology=False) + + def test_dim_coord_construction__climatology(self): + self.check_case_dim_coord_construction(climatology=True) + def test_dim_coord_construction_masked_array(self): self._set_cf_coord_var(np.ma.array( np.arange(6), diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py new file mode 100644 index 0000000000..c530538b90 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py @@ -0,0 +1,81 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ +fc_rules_cf_fc.build_geostationary_coordinate_system`. 
+ 
+ """
+ 
+ from __future__ import (absolute_import, division, print_function)
+ from six.moves import (filter, input, map, range, zip) # noqa
+ 
+ # import iris tests first so that some things can be initialised before
+ # importing anything else
+ import iris.tests as tests
+ 
+ import iris
+ from iris.coord_systems import Geostationary
+ from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \
+ build_geostationary_coordinate_system
+ from iris.tests import mock
+ 
+ 
+ class TestBuildGeostationaryCoordinateSystem(tests.IrisTest):
+ def _test(self, inverse_flattening=False):
+ """
+ Generic test that can check geostationary validity with or
+ without inverse flattening.
+ """
+ cf_grid_var_kwargs = {
+ 'spec': [],
+ 'latitude_of_projection_origin': 0.0,
+ 'longitude_of_projection_origin': 2.0,
+ 'perspective_point_height': 2000000.0,
+ 'sweep_angle_axis': 'x',
+ 'false_easting': 100.0,
+ 'false_northing': 200.0,
+ 'semi_major_axis': 6377563.396}
+ 
+ ellipsoid_kwargs = {'semi_major_axis': 6377563.396}
+ if inverse_flattening:
+ ellipsoid_kwargs['inverse_flattening'] = 299.3249646
+ else:
+ ellipsoid_kwargs['semi_minor_axis'] = 6356256.909
+ cf_grid_var_kwargs.update(ellipsoid_kwargs)
+ 
+ cf_grid_var = mock.Mock(**cf_grid_var_kwargs)
+ ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs)
+ 
+ cs = build_geostationary_coordinate_system(None, cf_grid_var)
+ expected = Geostationary(
+ latitude_of_projection_origin=cf_grid_var.
+ latitude_of_projection_origin,
+ longitude_of_projection_origin=cf_grid_var. 
+ longitude_of_projection_origin, + perspective_point_height=cf_grid_var.perspective_point_height, + sweep_angle_axis=cf_grid_var.sweep_angle_axis, + false_easting=cf_grid_var.false_easting, + false_northing=cf_grid_var.false_northing, + ellipsoid=ellipsoid) + + self.assertEqual(cs, expected) + + def test_valid(self): + self._test(inverse_flattening=False) + + def test_inverse_flattening(self): + self._test(inverse_flattening=True) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py new file mode 100644 index 0000000000..4eab8048db --- /dev/null +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py @@ -0,0 +1,79 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ +fc_rules_cf_fc.build_vertical_perspective_coordinate_system`. 
+ +""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# import iris tests first so that some things can be initialised before +# importing anything else +import iris.tests as tests + +import iris +from iris.coord_systems import VerticalPerspective +from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ + build_vertical_perspective_coordinate_system +from iris.tests import mock + + +class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): + def _test(self, inverse_flattening=False): + """ + Generic test that can check vertical perspective validity with or + without inverse flattening. + """ + cf_grid_var_kwargs = { + 'spec': [], + 'latitude_of_projection_origin': 1.0, + 'longitude_of_projection_origin': 2.0, + 'perspective_point_height': 2000000.0, + 'false_easting': 100.0, + 'false_northing': 200.0, + 'semi_major_axis': 6377563.396} + + ellipsoid_kwargs = {'semi_major_axis': 6377563.396} + if inverse_flattening: + ellipsoid_kwargs['inverse_flattening'] = 299.3249646 + else: + ellipsoid_kwargs['semi_minor_axis'] = 6356256.909 + cf_grid_var_kwargs.update(ellipsoid_kwargs) + + cf_grid_var = mock.Mock(**cf_grid_var_kwargs) + ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) + + cs = build_vertical_perspective_coordinate_system(None, cf_grid_var) + expected = VerticalPerspective( + latitude_of_projection_origin=cf_grid_var. + latitude_of_projection_origin, + longitude_of_projection_origin=cf_grid_var. 
+ longitude_of_projection_origin, + perspective_point_height=cf_grid_var.perspective_point_height, + false_easting=cf_grid_var.false_easting, + false_northing=cf_grid_var.false_northing, + ellipsoid=ellipsoid) + + self.assertEqual(cs, expected) + + def test_valid(self): + self._test(inverse_flattening=False) + + def test_inverse_flattening(self): + self._test(inverse_flattening=True) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py new file mode 100644 index 0000000000..8e379686d3 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py @@ -0,0 +1,67 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ +fc_rules_cf_fc.get_cf_bounds_var`. 
+ +""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# import iris tests first so that some things can be initialised before +# importing anything else +import iris.tests as tests + + +from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ + get_cf_bounds_var, CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY + +from iris.tests import mock + + +class TestGetCFBoundsVar(tests.IrisTest): + # Tests to check that get_cf_bounds_var will return the bounds_var and + # the correct climatological flag. + def _generic_test(self, test_climatological_bounds=False): + cf_coord_var = mock.MagicMock() + + cf_group_dict = {'TEST': mock.sentinel.bounds_var} + if test_climatological_bounds: + cf_coord_var.cf_group.climatology = cf_group_dict + test_attr = CF_ATTR_CLIMATOLOGY + else: + cf_coord_var.cf_group.bounds = cf_group_dict + test_attr = CF_ATTR_BOUNDS + + for attr in (CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY): + attr_val = 'TEST' if attr == test_attr else None + setattr(cf_coord_var, attr, attr_val) + + bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + self.assertIs(bounds_var, mock.sentinel.bounds_var) + self.assertEqual(climatological, test_climatological_bounds) + + def test_bounds_normal(self): + self._generic_test(test_climatological_bounds=False) + + def test_bounds_climatological(self): + self._generic_test(test_climatological_bounds=True) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py new file mode 100644 index 0000000000..4f29965096 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py @@ -0,0 +1,258 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. 
+# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ +fc_rules_cf_fc.get_names`. + +""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# import iris tests first so that some things can be initialised before +# importing anything else +import iris.tests as tests + +import numpy as np + +from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import get_names +from iris.tests import mock + + +class TestGetNames(tests.IrisTest): + """ + The tests included in this class cover all the variations of possible + combinations of the following inputs: + * standard_name = [None, 'projection_y_coordinate', 'latitude_coordinate'] + * long_name = [None, 'lat_long_name'] + * var_name = ['grid_latitude', 'lat_var_name'] + * coord_name = [None, 'latitude'] + + standard_name, var_name and coord_name each contain a different valid CF + standard name so that it is clear which is being used to set the resulting + standard_name. 
+ + """ + @staticmethod + def _make_cf_var(standard_name, long_name, cf_name): + cf_var = mock.Mock( + cf_name=cf_name, + standard_name=standard_name, + long_name=long_name, + units='degrees', + dtype=np.float64, + cell_methods=None, + cf_group=mock.Mock(global_attributes={})) + return cf_var + + def check_names(self, inputs, expected): + # Inputs - attributes on the fake CF Variable. Note: coord_name is + # optionally set in some pyke rules. + standard_name, long_name, var_name, coord_name = inputs + # Expected - The expected names and attributes. + exp_std_name, exp_long_name, exp_var_name, exp_attributes = expected + + cf_var = self._make_cf_var(standard_name=standard_name, + long_name=long_name, cf_name=var_name) + attributes = {} + res_standard_name, res_long_name, res_var_name = get_names( + cf_var, coord_name, attributes) + + # Check the names and attributes are as expected. + self.assertEqual(res_standard_name, exp_std_name) + self.assertEqual(res_long_name, exp_long_name) + self.assertEqual(res_var_name, exp_var_name) + self.assertEqual(attributes, exp_attributes) + + def test_var_name_valid(self): + # Only var_name is set and it is set to a valid standard name. + inp = (None, None, 'grid_latitude', None) + exp = ('grid_latitude', None, 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_var_name_valid_coord_name_set(self): + # var_name is a valid standard name, coord_name is also set. + inp = (None, None, 'grid_latitude', 'latitude') + exp = ('latitude', None, 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_var_name_invalid(self): + # Only var_name is set but it is not a valid standard name. + inp = (None, None, 'lat_var_name', None) + exp = (None, None, 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_var_name_invalid_coord_name_set(self): + # var_name is not a valid standard name, the coord_name is also set. 
+ inp = (None, None, 'lat_var_name', 'latitude') + exp = ('latitude', None, 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_long_name_set_var_name_valid(self): + # long_name is not None, var_name is set to a valid standard name. + inp = (None, 'lat_long_name', 'grid_latitude', None) + exp = ('grid_latitude', 'lat_long_name', 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_long_name_set_var_name_valid_coord_name_set(self): + # long_name is not None, var_name is set to a valid standard name, and + # coord_name is set. + inp = (None, 'lat_long_name', 'grid_latitude', 'latitude') + exp = ('latitude', 'lat_long_name', 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_long_name_set_var_name_invalid(self): + # long_name is not None, var_name is not set to a valid standard name. + inp = (None, 'lat_long_name', 'lat_var_name', None) + exp = (None, 'lat_long_name', 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_long_name_set_var_name_invalid_coord_name_set(self): + # long_name is not None, var_name is not set to a valid standard name, + # and coord_name is set. + inp = (None, 'lat_long_name', 'lat_var_name', 'latitude') + exp = ('latitude', 'lat_long_name', 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_std_name_valid_var_name_valid(self): + # standard_name is a valid standard name, var_name is a valid standard + # name. + inp = ('projection_y_coordinate', None, 'grid_latitude', None) + exp = ('projection_y_coordinate', None, 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_std_name_valid_var_name_valid_coord_name_set(self): + # standard_name is a valid standard name, var_name is a valid standard + # name, coord_name is set. 
+ inp = ('projection_y_coordinate', None, 'grid_latitude', 'latitude') + exp = ('projection_y_coordinate', None, 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_std_name_valid_var_name_invalid(self): + # standard_name is a valid standard name, var_name is not a valid + # standard name. + inp = ('projection_y_coordinate', None, 'lat_var_name', None) + exp = ('projection_y_coordinate', None, 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_std_name_valid_var_name_invalid_coord_name_set(self): + # standard_name is a valid standard name, var_name is not a valid + # standard name, coord_name is set. + inp = ('projection_y_coordinate', None, 'lat_var_name', 'latitude') + exp = ('projection_y_coordinate', None, 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_std_name_valid_long_name_set_var_name_valid(self): + # standard_name is a valid standard name, long_name is not None, + # var_name is a valid standard name. + inp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', + None) + exp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_std_name_valid_long_name_set_var_name_valid_coord_name_set(self): + # standard_name is a valid standard name, long_name is not None, + # var_name is a valid standard name, coord_name is set. + inp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', + 'latitude') + exp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_std_name_valid_long_name_set_var_name_invalid(self): + # standard_name is a valid standard name, long_name is not None, + # var_name is not a valid standard name. 
+ inp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', + None) + exp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_std_name_valid_long_name_set_var_name_invalid_coord_name_set( + self): + # standard_name is a valid standard name, long_name is not None, + # var_name is not a valid standard name, coord_name is set. + inp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', + 'latitude') + exp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_std_name_invalid_var_name_valid(self): + # standard_name is not a valid standard name, var_name is a valid + # standard name. + inp = ('latitude_coord', None, 'grid_latitude', None) + exp = ('grid_latitude', None, 'grid_latitude', {}) + self.check_names(inp, exp) + + def test_std_name_invalid_var_name_valid_coord_name_set(self): + # standard_name is not a valid standard name, var_name is a valid + # standard name, coord_name is set. + inp = ('latitude_coord', None, 'grid_latitude', 'latitude') + exp = ('latitude', None, 'grid_latitude', + {'invalid_standard_name': 'latitude_coord'}) + self.check_names(inp, exp) + + def test_std_name_invalid_var_name_invalid(self): + # standard_name is not a valid standard name, var_name is not a valid + # standard name. + inp = ('latitude_coord', None, 'lat_var_name', None) + exp = (None, None, 'lat_var_name', {}) + self.check_names(inp, exp) + + def test_std_name_invalid_var_name_invalid_coord_name_set(self): + # standard_name is not a valid standard name, var_name is not a valid + # standard name, coord_name is set. 
+ inp = ('latitude_coord', None, 'lat_var_name', 'latitude') + exp = ('latitude', None, 'lat_var_name', + {'invalid_standard_name': 'latitude_coord'}) + self.check_names(inp, exp) + + def test_std_name_invalid_long_name_set_var_name_valid(self): + # standard_name is not a valid standard name, long_name is not None + # var_name is a valid standard name. + inp = ('latitude_coord', 'lat_long_name', 'grid_latitude', None) + exp = ('grid_latitude', 'lat_long_name', 'grid_latitude', + {'invalid_standard_name': 'latitude_coord'}) + self.check_names(inp, exp) + + def test_std_name_invalid_long_name_set_var_name_valid_coord_name_set( + self): + # standard_name is not a valid standard name, long_name is not None, + # var_name is a valid standard name, coord_name is set. + inp = ('latitude_coord', 'lat_long_name', 'grid_latitude', 'latitude') + exp = ('latitude', 'lat_long_name', 'grid_latitude', + {'invalid_standard_name': 'latitude_coord'}) + self.check_names(inp, exp) + + def test_std_name_invalid_long_name_set_var_name_invalid(self): + # standard_name is not a valid standard name, long_name is not None + # var_name is not a valid standard name. + inp = ('latitude_coord', 'lat_long_name', 'lat_var_name', None) + exp = (None, 'lat_long_name', 'lat_var_name', + {'invalid_standard_name': 'latitude_coord'}) + self.check_names(inp, exp) + + def test_std_name_invalid_long_name_set_var_name_invalid_coord_name_set( + self): + # standard_name is not a valid standard name, long_name is not None, + # var_name is not a valid standard name, coord_name is set. 
+ inp = ('latitude_coord', 'lat_long_name', 'lat_var_name', 'latitude') + exp = ('latitude', 'lat_long_name', 'lat_var_name', + {'invalid_standard_name': 'latitude_coord'}) + self.check_names(inp, exp) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py index 3d64e038b5..d9a055e4b5 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2017, Met Office +# (C) British Crown Copyright 2017 - 2019, Met Office # # This file is part of Iris. # @@ -34,6 +34,7 @@ class MyProxy(object): def __init__(self, a): self.shape = a.shape self.dtype = a.dtype + self.ndim = a.ndim self.a = a def __getitem__(self, keys): diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 7acb0b6284..641283ef0d 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2017 - 2018, Met Office +# (C) British Crown Copyright 2017 - 2019, Met Office # # This file is part of Iris. 
# @@ -24,17 +24,17 @@ import iris.tests as tests import dask.array as da +import dask.config import numpy as np import numpy.ma as ma -from iris._lazy_data import as_lazy_data, _MAX_CHUNK_SIZE, _limited_shape +from iris._lazy_data import as_lazy_data, _optimum_chunksize from iris.tests import mock class Test_as_lazy_data(tests.IrisTest): def test_lazy(self): - data = da.from_array(np.arange(24).reshape((2, 3, 4)), - chunks=_MAX_CHUNK_SIZE) + data = da.from_array(np.arange(24).reshape((2, 3, 4)), chunks='auto') result = as_lazy_data(data) self.assertIsInstance(result, da.core.Array) @@ -50,17 +50,10 @@ def test_masked(self): def test_non_default_chunks(self): data = np.arange(24) - chunks = 12 + chunks = (12,) lazy_data = as_lazy_data(data, chunks=chunks) result, = np.unique(lazy_data.chunks) - self.assertEqual(result, chunks) - - def test_non_default_chunks__chunks_already_set(self): - chunks = 12 - data = da.from_array(np.arange(24), chunks=chunks) - lazy_data = as_lazy_data(data) - result, = np.unique(lazy_data.chunks) - self.assertEqual(result, chunks) + self.assertEqual(result, 24) def test_with_masked_constant(self): masked_data = ma.masked_array([8], mask=True) @@ -68,6 +61,11 @@ def test_with_masked_constant(self): result = as_lazy_data(masked_constant) self.assertIsInstance(result, da.core.Array) + +class Test__optimised_chunks(tests.IrisTest): + # Stable, known chunksize for testing. + FIXED_CHUNKSIZE_LIMIT = 1024 * 1024 * 64 + @staticmethod def _dummydata(shape): return mock.Mock(spec=da.core.Array, @@ -75,7 +73,7 @@ def _dummydata(shape): shape=shape) def test_chunk_size_limiting(self): - # Check the default chunksizes for large data. + # Check default chunksizes for large data (with a known size limit). 
given_shapes_and_resulting_chunks = [ ((16, 1024, 1024), (16, 1024, 1024)), # largest unmodified ((17, 1011, 1022), (8, 1011, 1022)), @@ -84,29 +82,87 @@ def test_chunk_size_limiting(self): ((17, 1, 1011, 1022), (8, 1, 1011, 1022)), ((11, 2, 1011, 1022), (5, 2, 1011, 1022)) ] - err_fmt = 'Result of reducing shape {} was {}, expected {}' + err_fmt = 'Result of optimising chunks {} was {}, expected {}' for (shape, expected) in given_shapes_and_resulting_chunks: - chunks = _limited_shape(shape) + chunks = _optimum_chunksize(shape, shape, + limit=self.FIXED_CHUNKSIZE_LIMIT) msg = err_fmt.format(shape, chunks, expected) self.assertEqual(chunks, expected, msg) + def test_chunk_size_expanding(self): + # Check the expansion of small chunks, (with a known size limit). + given_shapes_and_resulting_chunks = [ + ((1, 100, 100), (16, 100, 100), (16, 100, 100)), + ((1, 100, 100), (5000, 100, 100), (1667, 100, 100)), + ((3, 300, 200), (10000, 3000, 2000), (3, 1500, 2000)), + ((3, 300, 200), (10000, 300, 2000), (27, 300, 2000)), + ((3, 300, 200), (8, 300, 2000), (8, 300, 2000)), + ((3, 300, 200), (117, 300, 1000), (39, 300, 1000)), + ] + err_fmt = 'Result of optimising shape={};chunks={} was {}, expected {}' + for (shape, fullshape, expected) in given_shapes_and_resulting_chunks: + chunks = _optimum_chunksize(chunks=shape, shape=fullshape, + limit=self.FIXED_CHUNKSIZE_LIMIT) + msg = err_fmt.format(fullshape, shape, chunks, expected) + self.assertEqual(chunks, expected, msg) + + def test_chunk_expanding_equal_division(self): + # Check that expansion chooses equal chunk sizes as far as possible. + + # Table of test cases: + # (input-chunkshape, full-shape, size-limit, result-chunkshape) + testcases_chunksin_fullshape_limit_result = [ + ((4,), (12,), 15, (12,)), # gives a single chunk, of size 12 + ((4,), (13,), 15, (8,)), # chooses chunks of 8+5, better than 12+1 + ((4,), (16,), 15, (8,)), # 8+8 is better than 12+4; 16 is too big. 
+ ((4,), (96,), 15, (12,)), # 12 is largest 'allowed' + ((4,), (96,), 31, (24,)), # 28 doesn't divide 96 so neatly, + # A multi-dimensional case, where trailing dims are 'filled'. + ((4, 5, 100), (25, 10, 200), 16*2000, (16, 10, 200)), + # Equivalent case with additional initial dimensions. + ((1, 1, 4, 5, 100), (3, 5, 25, 10, 200), 16*2000, + (1, 1, 16, 10, 200)), # effectively the same as the previous. + ] + err_fmt_main = ('Main chunks result of optimising ' + 'chunks={},shape={},limit={} ' + 'was {}, expected {}') + for (chunks, shape, limit, expected_result) in \ + testcases_chunksin_fullshape_limit_result: + result = _optimum_chunksize(chunks=chunks, + shape=shape, + limit=limit, + dtype=np.dtype('b1')) + msg = err_fmt_main.format(chunks, shape, limit, + result, expected_result) + self.assertEqual(result, expected_result, msg) + + def test_default_chunksize(self): + # Check that the "ideal" chunksize is taken from the dask config. + with dask.config.set({'array.chunk-size': '20b'}): + chunks = _optimum_chunksize((1, 8), + shape=(400, 20), + dtype=np.dtype('f4')) + self.assertEqual(chunks, (1, 4)) + def test_default_chunks_limiting(self): - # Check that chunking is limited when no specific 'chunks' given. - limitcall_patch = self.patch('iris._lazy_data._limited_shape') + # Check that chunking is still controlled when no specific 'chunks' + # is passed. + limitcall_patch = self.patch('iris._lazy_data._optimum_chunksize') test_shape = (3, 2, 4) data = self._dummydata(test_shape) as_lazy_data(data) self.assertEqual(limitcall_patch.call_args_list, - [mock.call(test_shape)]) - - def test_large_specific_chunk_passthrough(self): - # Check that even a too-large specific 'chunks' arg is honoured. 
- limitcall_patch = self.patch('iris._lazy_data._limited_shape') - huge_test_shape = (1001, 1002, 1003, 1004) - data = self._dummydata(huge_test_shape) - result = as_lazy_data(data, chunks=huge_test_shape) - self.assertEqual(limitcall_patch.call_args_list, []) - self.assertEqual(result.shape, huge_test_shape) + [mock.call(list(test_shape), + shape=test_shape, + dtype=np.dtype('f4'))]) + + def test_shapeless_data(self): + # Check that chunk optimisation is skipped if shape contains a zero. + limitcall_patch = self.patch('iris._lazy_data._optimum_chunksize') + test_shape = (2, 1, 0, 2) + data = self._dummydata(test_shape) + as_lazy_data(data, chunks=test_shape) + self.assertFalse(limitcall_patch.called) if __name__ == '__main__': diff --git a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py index 03782cda85..1d292b712d 100644 --- a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py +++ b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2018, Met Office +# (C) British Crown Copyright 2018 - 2019, Met Office # # This file is part of Iris. # @@ -36,6 +36,7 @@ class ArrayAccessCounter(object): def __init__(self, array): self.dtype = array.dtype self.shape = array.shape + self.ndim = array.ndim self._array = array self.access_count = 0 @@ -75,11 +76,22 @@ def test_combined_access(self): lazy_array = as_lazy_data(wrapped_array) derived_a = lazy_array + 1 derived_b = lazy_array + 2 + derived_c = lazy_array + 3 + derived_d = lazy_array + 4 + derived_e = lazy_array + 5 cube_a = Cube(derived_a) cube_b = Cube(derived_b) - co_realise_cubes(cube_a, cube_b) - # Though used twice, the source data should only get fetched once. 
- self.assertEqual(wrapped_array.access_count, 1) + cube_c = Cube(derived_c) + cube_d = Cube(derived_d) + cube_e = Cube(derived_e) + co_realise_cubes(cube_a, cube_b, cube_c, cube_d, cube_e) + # Though used more than once, the source data should only get fetched + # twice by dask. Once when dask performs an initial data access with + # no data payload to ascertain the metadata associated with the + # dask.array (this access is specific to dask 2+, see + # dask.array.utils.meta_from_array), and again when the whole data is + # accessed. + self.assertEqual(wrapped_array.access_count, 2) if __name__ == '__main__': diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py index 40ef6213ff..14148cfb5a 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2017, Met Office +# (C) British Crown Copyright 2017 - 2019, Met Office # # This file is part of Iris. # @@ -26,13 +26,13 @@ import dask.array as da import numpy as np -from iris._lazy_data import is_lazy_data, _MAX_CHUNK_SIZE +from iris._lazy_data import is_lazy_data class Test_is_lazy_data(tests.IrisTest): def test_lazy(self): values = np.arange(30).reshape((2, 5, 3)) - lazy_array = da.from_array(values, chunks=_MAX_CHUNK_SIZE) + lazy_array = da.from_array(values, chunks='auto') self.assertTrue(is_lazy_data(lazy_array)) def test_real(self): diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py new file mode 100644 index 0000000000..b70af30296 --- /dev/null +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -0,0 +1,76 @@ +# (C) British Crown Copyright 2019, Met Office +# +# This file is part of Iris. 
+# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +"""Unit tests for the `iris.plot._check_geostationary_coords_and_convert +function.""" + +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +import numpy as np +from mock import Mock + +from cartopy.crs import Geostationary, NearsidePerspective +from iris.plot import _check_geostationary_coords_and_convert + + +class Test__check_geostationary_coords_and_convert(tests.IrisTest): + def setUp(self): + geostationary_altitude = 35785831.0 + # proj4_params is the one attribute of the Geostationary class that + # is needed for the function. + self.proj4_params = {'h': geostationary_altitude} + + # Simulate the maximum-dimension array that could be processed. + a = np.linspace(0, 2, 6) + b = np.linspace(2, 3, 5) + self.x_original, self.y_original = np.meshgrid(a, b) + + # Expected arrays if conversion takes place. + self.x_converted, self.y_converted = ( + i * geostationary_altitude for i in (self.x_original, + self.y_original)) + + def _test(self, geostationary=True): + # Re-usable test for when Geostationary is present OR absent. + if geostationary: + # A Geostationary projection WILL be processed. 
+ projection_spec = Geostationary + target_tuple = (self.x_converted, self.y_converted) + else: + # A non-Geostationary projection WILL NOT be processed. + projection_spec = NearsidePerspective + target_tuple = (self.x_original, self.y_original) + + projection = Mock(spec=projection_spec) + projection.proj4_params = self.proj4_params + # Projection is looked for within a dictionary called kwargs. + kwargs = {'transform': projection} + + x, y = _check_geostationary_coords_and_convert(self.x_original, + self.y_original, + kwargs) + self.assertArrayEqual((x, y), target_tuple) + + def test_geostationary_present(self): + self._test(geostationary=True) + + def test_geostationary_absent(self): + self._test(geostationary=False) diff --git a/lib/iris/util.py b/lib/iris/util.py index b0f00c3b52..19d50ebc62 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -23,8 +23,11 @@ from six.moves import (filter, input, map, range, zip) # noqa import six +try: # Python 3 + from collections.abc import Hashable +except ImportError: # Python 2.7 + from collections import Hashable import abc -import collections from contextlib import contextmanager import copy import functools @@ -835,7 +838,7 @@ def __new__(cls, name, bases, namespace): @functools.total_ordering class _OrderedHashable(six.with_metaclass(_MetaOrderedHashable, - collections.Hashable)): + Hashable)): """ Convenience class for creating "immutable", hashable, and ordered classes. diff --git a/requirements/all.txt b/requirements/all.txt index 657be9ce19..46558e2cbd 100644 --- a/requirements/all.txt +++ b/requirements/all.txt @@ -2,7 +2,7 @@ # ------------------------------------------------ # esmpy regridding not available through pip. 
-#conda: esmpy>=7.0 (only python=2) +#conda: esmpy>=7.0 #gdal : under review -- not tested at present mo_pack nc-time-axis diff --git a/requirements/core.txt b/requirements/core.txt index e39777edb7..98971a1c23 100644 --- a/requirements/core.txt +++ b/requirements/core.txt @@ -4,10 +4,10 @@ # Without these, iris won't even import. cartopy -#conda: proj4<5 +#conda: proj4<6 cf-units>=2 cftime -dask[array] #conda: dask +dask[array]>=2 #conda: dask>=2 matplotlib>=2,<3 netcdf4 numpy>=1.14 diff --git a/requirements/extensions.txt b/requirements/extensions.txt index 0f3d0e0379..8e8e847f88 100644 --- a/requirements/extensions.txt +++ b/requirements/extensions.txt @@ -5,5 +5,5 @@ # struggle. To install these extensions, ensure iris[core] has been installed # first. -iris_grib;python_version<"3" #conda: +iris-grib #conda: gdal diff --git a/requirements/gen_conda_requirements.py b/requirements/gen_conda_requirements.py index c26dffe99f..41e5dc6d74 100644 --- a/requirements/gen_conda_requirements.py +++ b/requirements/gen_conda_requirements.py @@ -23,7 +23,7 @@ CONDA_PATTERN = '#conda:' -def read_conda_reqs(fname, options): +def read_conda_reqs(fname): lines = [] with open(fname, 'r') as fh: for line in fh: @@ -31,33 +31,19 @@ def read_conda_reqs(fname, options): if CONDA_PATTERN in line: line_start = line.index(CONDA_PATTERN) + len(CONDA_PATTERN) line = line[line_start:].strip() - if 'only python=2' in line: - if 'python=2' in options: - line = line.replace('(only python=2)', '') - lines.append(line) - else: - continue - else: - lines.append(line) - else: - lines.append(line) + lines.append(line) return lines -def compute_requirements(requirement_names=('core', ), options=None): +def compute_requirements(requirement_names=('core', )): conda_reqs_lines = [] - if 'python=2' in options: - conda_reqs_lines.append('python=2.*') - else: - conda_reqs_lines.append('# Python 3 conda configuration') - for req_name in requirement_names: fname = os.path.join(REQS_DIR, 
'{}.txt'.format(req_name)) if not os.path.exists(fname): raise RuntimeError('Unable to find the requirements file for {} ' 'in {}'.format(req_name, fname)) - conda_reqs_lines.extend(read_conda_reqs(fname, options)) + conda_reqs_lines.extend(read_conda_reqs(fname)) conda_reqs_lines.append('') return conda_reqs_lines @@ -70,9 +56,6 @@ def main(): "--groups", nargs='*', default=[], help=("Gather requirements for these given named groups " "(as found in the requirements/ folder)")) - parser.add_argument( - "--py2", action="store_true", - help="Build the conda requirements for a python 2 installation") args = parser.parse_args() @@ -80,11 +63,7 @@ def main(): requirement_names.insert(0, 'core') requirement_names.insert(0, 'setup') - options = [] - if args.py2: - options.append('python=2') - - print('\n'.join(compute_requirements(requirement_names, options))) + print('\n'.join(compute_requirements(requirement_names))) if __name__ == '__main__': diff --git a/tools/gen_translations.py b/tools/gen_translations.py index 5a710c5220..8df264d8ab 100644 --- a/tools/gen_translations.py +++ b/tools/gen_translations.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2014 - 2017, Met Office +# (C) British Crown Copyright 2014 - 2019, Met Office # # This file is part of Iris. 
# @@ -28,6 +28,7 @@ import requests import sys +import metarelate from metarelate.fuseki import FusekiServer from translator import (FORMAT_URIS, FieldcodeCFMappings, StashCFNameMappings, @@ -191,7 +192,7 @@ def build_grib_cf_map(fuseki, now, git_sha, base_dir): with open(filename, 'w') as fh: fh.write(HEADER.format(year=YEAR, doc_string=DOC_STRING_GRIB, datestamp=now, git_sha=git_sha, - name='Iris')) + name='iris-grib')) fh.write(HEADER_GRIB) fh.write('\n') @@ -221,6 +222,9 @@ def build_grib_cf_map(fuseki, now, git_sha, base_dir): def main(): + # Protect metarelate resource from 1.0 emergent bug + if not float(metarelate.__version__) >= 1.1: + raise ValueError("Please ensure that Metarelate Version is >= 1.1") now = datetime.utcnow().strftime('%d %B %Y %H:%m') git_sha = requests.get('http://www.metarelate.net/metOcean/latest_sha').text gen_path = os.path.abspath(sys.modules['__main__'].__file__)