'<div class="section" id="{}">'
-                       '<h4>{}'
-                       '<a class="headerlink" href="#{}"'
-                       ' title="Permalink to this headline">&para;</a>'
-                       '</h4>')
-
-    toc_template = ('<li>'
-                    '<a class="reference internal" href="#{}">{}</a>'
-                    '</li>')
-
- random_image_content_template = '''
-// This file was automatically generated by gen_gallery.py & should not be
-// modified directly.
-
-images = new Array();
-
-{}
-
-'''
-
- random_image_template = "['{thumbfile}', '{full_image}', '{link}'];"
- random_image_join = 'images[{}] = {}'
-
- dirs = ('General', 'Meteorology', 'Oceanography')
-
- for subdir in dirs:
- rows.append(header_template.format(subdir, subdir, subdir))
- toc_rows.append(toc_template.format(subdir, subdir))
-
- origdir = os.path.join(os.path.dirname(outdir), rootdir, subdir)
- if not os.path.exists(origdir):
- origdir = os.path.join(os.path.dirname(outdir), 'plot_directive',
- rootdir, subdir)
- thumbdir = os.path.join(outdir, rootdir, subdir, 'thumbnails')
- if not os.path.exists(thumbdir):
- os.makedirs(thumbdir)
-
- data = []
-
- for filename in sorted(glob.glob(os.path.join(origdir, '*.png'))):
- if filename.endswith('hires.png'):
- continue
-
- path, filename = os.path.split(filename)
- basename, ext = os.path.splitext(filename)
- if basename in skips:
- continue
-
- # Create thumbnails based on images in tmpdir, and place them
- # within the build tree.
- orig_path = str(os.path.join(origdir, filename))
- thumb_path = str(os.path.join(thumbdir, filename))
- if out_of_date(orig_path, thumb_path) or True:
- thumbnails[orig_path] = thumb_path
-
- m = multiimage.match(basename)
- if m is not None:
- basename = m.group(1)
-
- data.append((subdir, basename,
- os.path.join(rootdir, subdir, 'thumbnails',
- filename)))
-
- for (subdir, basename, thumbfile) in data:
- if thumbfile is not None:
- anchor = os.path.basename(thumbfile)
- anchor = os.path.splitext(anchor)[0].replace('_', '-')
- link = 'examples/{}/{}.html#{}'.format(
- subdir,
- basename,
- anchor)
- rows.append(link_template.format(
- href=link,
- thumb_file=thumbfile,
- alternative_text=basename))
- random_image.append(random_image_template.format(
- link=link,
- thumbfile=thumbfile,
- basename=basename,
- full_image='_images/' + os.path.basename(thumbfile)))
-
- if len(data) == 0:
- warnings.warn('No thumbnails were found in {}'.format(subdir))
-
-        # Close out the <div> opened up at the top of this loop.
-        rows.append('</div>')
-
- # Generate JS list of images for front page.
- random_image_content = '\n'.join([random_image_join.format(i, line)
- for i, line in enumerate(random_image)])
- random_image_content = random_image_content_template.format(
- random_image_content)
- random_image_script_path = os.path.join(app.builder.srcdir,
- '_static',
- 'random_image.js')
- with open(random_image_script_path, 'w') as fh:
- fh.write(random_image_content)
-
- content = template.format('\n'.join(toc_rows),
- '\n'.join(rows))
-
- # Only write out the file if the contents have actually changed.
- # Otherwise, this triggers a full rebuild of the docs.
-
- gallery_path = os.path.join(app.builder.srcdir,
- '_templates',
- 'gallery.html')
- if os.path.exists(gallery_path):
- with open(gallery_path, 'r') as fh:
- regenerate = fh.read() != content
- else:
- regenerate = True
- if regenerate:
- with open(gallery_path, 'w') as fh:
- fh.write(content)
-
- for key in status_iterator(thumbnails, 'generating thumbnails... ',
- length=len(thumbnails)):
- image.thumbnail(key, thumbnails[key], 0.3)
-
-
-def setup(app):
- app.connect('env-updated', gen_gallery)
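The deleted module combined the two template strings above to build the front
page's ``random_image.js``. A minimal sketch of that assembly step, using a
hypothetical gallery entry::

    random_image_template = "['{thumbfile}', '{full_image}', '{link}'];"
    random_image_join = 'images[{}] = {}'

    entries = [
        random_image_template.format(
            thumbfile='examples/General/thumbnails/plot_a.png',
            full_image='_images/plot_a.png',
            link='examples/General/plot_a.html#plot-a',
        ),
    ]
    # One "images[i] = [...];" line per gallery entry, as written to
    # _static/random_image.js by the code above.
    print('\n'.join(
        random_image_join.format(i, line) for i, line in enumerate(entries)
    ))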
diff --git a/docs/iris/src/sphinxext/generate_package_rst.py b/docs/iris/src/sphinxext/generate_package_rst.py
index 0c6510c170..5ce9f6d014 100644
--- a/docs/iris/src/sphinxext/generate_package_rst.py
+++ b/docs/iris/src/sphinxext/generate_package_rst.py
@@ -8,11 +8,23 @@
import sys
import re
import inspect
+import ntpath
+
+# list of tuples for modules to exclude. Useful if the documentation throws
+# warnings, especially for experimental modules.
+exclude_modules = [
+ ("experimental/raster", "iris.experimental.raster") # gdal conflicts
+]
+
+
+# print to stdout, including the name of the python file
+def autolog(message):
+ print("[{}] {}".format(ntpath.basename(__file__), message))
document_dict = {
# Use autoclass for classes.
- 'class': '''
+ "class": """
{object_docstring}
..
@@ -22,20 +34,21 @@
:undoc-members:
:inherited-members:
-''',
- 'function': '''
+""",
+ "function": """
.. autofunction:: {object_name}
-''',
+""",
# For everything else, let automodule do some magic...
- None: '''
+ None: """
.. autodata:: {object_name}
-'''}
+""",
+}
-horizontal_sep = '''
+horizontal_sep = """
.. raw:: html
    <p class="hr_p"><a href="#">&uarr;&#32;&#32; top &#32;&uarr;</a></p>
@@ -47,21 +60,22 @@
-->
-'''
+"""
def lookup_object_type(obj):
if inspect.isclass(obj):
- return 'class'
+ return "class"
elif inspect.isfunction(obj):
- return 'function'
+ return "function"
else:
return None
-def auto_doc_module(file_path, import_name, root_package,
- package_toc=None, title=None):
- doc = r'''.. _{import_name}:
+def auto_doc_module(
+ file_path, import_name, root_package, package_toc=None, title=None
+):
+ doc = r""".. _{import_name}:
{title_underline}
{title}
@@ -77,54 +91,66 @@ def auto_doc_module(file_path, import_name, root_package,
{module_elements}
+"""
-'''
if package_toc:
- sidebar = '''
-.. sidebar:: Modules in this package
-
+ sidebar = """
{package_toc_tree}
- '''.format(package_toc_tree=package_toc)
+ """.format(
+ package_toc_tree=package_toc
+ )
else:
- sidebar = ''
+ sidebar = ""
try:
mod = __import__(import_name)
except ImportError as e:
- message = r'''.. error::
+ message = r""".. error::
This module could not be imported. Some dependencies are missing::
- ''' + str(e)
- return doc.format(title=title or import_name,
- title_underline='=' * len(title or import_name),
- import_name=import_name, root_package=root_package,
- sidebar=sidebar, module_elements=message)
+ """ + str(
+ e
+ )
+ return doc.format(
+ title=title or import_name,
+ title_underline="=" * len(title or import_name),
+ import_name=import_name,
+ root_package=root_package,
+ sidebar=sidebar,
+ module_elements=message,
+ )
mod = sys.modules[import_name]
elems = dir(mod)
- if '__all__' in elems:
- document_these = [(attr_name, getattr(mod, attr_name))
- for attr_name in mod.__all__]
+ if "__all__" in elems:
+ document_these = [
+ (attr_name, getattr(mod, attr_name)) for attr_name in mod.__all__
+ ]
else:
- document_these = [(attr_name, getattr(mod, attr_name))
- for attr_name in elems
- if (not attr_name.startswith('_') and
- not inspect.ismodule(getattr(mod, attr_name)))]
+ document_these = [
+ (attr_name, getattr(mod, attr_name))
+ for attr_name in elems
+ if (
+ not attr_name.startswith("_")
+ and not inspect.ismodule(getattr(mod, attr_name))
+ )
+ ]
def is_from_this_module(arg):
- name = arg[0]
+ # name = arg[0]
obj = arg[1]
- return (hasattr(obj, '__module__') and
- obj.__module__ == mod.__name__)
+ return (
+ hasattr(obj, "__module__") and obj.__module__ == mod.__name__
+ )
- sort_order = {'class': 2, 'function': 1}
+ sort_order = {"class": 2, "function": 1}
# Sort them according to sort_order dict.
def sort_key(arg):
- name = arg[0]
+ # name = arg[0]
obj = arg[1]
return sort_order.get(lookup_object_type(obj), 0)
@@ -133,63 +159,81 @@ def sort_key(arg):
lines = []
for element, obj in document_these:
- object_name = import_name + '.' + element
+ object_name = import_name + "." + element
obj_content = document_dict[lookup_object_type(obj)].format(
object_name=object_name,
- object_name_header_line='+' * len(object_name),
- object_docstring=inspect.getdoc(obj))
+ object_name_header_line="+" * len(object_name),
+ object_docstring=inspect.getdoc(obj),
+ )
lines.append(obj_content)
lines = horizontal_sep.join(lines)
- module_elements = '\n'.join(' * :py:obj:`{}`'.format(element)
- for element, obj in document_these)
+ module_elements = "\n".join(
+ " * :py:obj:`{}`".format(element) for element, obj in document_these
+ )
lines = doc + lines
- return lines.format(title=title or import_name,
- title_underline='=' * len(title or import_name),
- import_name=import_name, root_package=root_package,
- sidebar=sidebar, module_elements=module_elements)
+ return lines.format(
+ title=title or import_name,
+ title_underline="=" * len(title or import_name),
+ import_name=import_name,
+ root_package=root_package,
+ sidebar=sidebar,
+ module_elements=module_elements,
+ )
def auto_doc_package(file_path, import_name, root_package, sub_packages):
- max_depth = 1 if import_name == 'iris' else 2
- package_toc = '\n '.join(sub_packages)
- package_toc = '''
+ max_depth = 1 if import_name == "iris" else 2
+ package_toc = "\n ".join(sub_packages)
+
+ package_toc = """
.. toctree::
:maxdepth: {:d}
:titlesonly:
+ :hidden:
{}
-'''.format(max_depth, package_toc)
+""".format(
+ max_depth, package_toc
+ )
- if '.' in import_name:
+ if "." in import_name:
title = None
else:
- title = import_name.capitalize() + ' reference documentation'
+ title = import_name.capitalize() + " API"
- return auto_doc_module(file_path, import_name, root_package,
- package_toc=package_toc, title=title)
+ return auto_doc_module(
+ file_path,
+ import_name,
+ root_package,
+ package_toc=package_toc,
+ title=title,
+ )
def auto_package_build(app):
root_package = app.config.autopackage_name
if root_package is None:
- raise ValueError('set the autopackage_name variable in the '
- 'conf.py file')
+ raise ValueError(
+ "set the autopackage_name variable in the " "conf.py file"
+ )
if not isinstance(root_package, list):
- raise ValueError('autopackage was expecting a list of packages to '
- 'document e.g. ["itertools"]')
+ raise ValueError(
+ "autopackage was expecting a list of packages to "
+ 'document e.g. ["itertools"]'
+ )
for package in root_package:
do_package(package)
def do_package(package_name):
- out_dir = package_name + os.path.sep
+ out_dir = "generated/api" + os.path.sep
# Import the root package. If this fails then an import error will be
# raised.
@@ -199,38 +243,45 @@ def do_package(package_name):
package_folder = []
module_folders = {}
+
for root, subFolders, files in os.walk(rootdir):
for fname in files:
name, ext = os.path.splitext(fname)
# Skip some non-relevant files.
- if (fname.startswith('.') or fname.startswith('#') or
- re.search('^_[^_]', fname) or fname.find('.svn') >= 0 or
- not (ext in ['.py', '.so'])):
+ if (
+ fname.startswith(".")
+ or fname.startswith("#")
+ or re.search("^_[^_]", fname)
+ or fname.find(".svn") >= 0
+ or not (ext in [".py", ".so"])
+ ):
continue
# Handle new shared library naming conventions
- if ext == '.so':
- name = name.split('.', 1)[0]
+ if ext == ".so":
+ name = name.split(".", 1)[0]
- rel_path = root_package + \
- os.path.join(root, fname).split(rootdir)[-1]
- mod_folder = root_package + \
- os.path.join(root).split(rootdir)[-1].replace('/', '.')
+ rel_path = (
+ root_package + os.path.join(root, fname).split(rootdir)[-1]
+ )
+ mod_folder = root_package + os.path.join(root).split(rootdir)[
+ -1
+ ].replace("/", ".")
# Only add this package to folder list if it contains an __init__
# script.
- if name == '__init__':
+ if name == "__init__":
package_folder.append([mod_folder, rel_path])
else:
- import_name = mod_folder + '.' + name
+ import_name = mod_folder + "." + name
mf_list = module_folders.setdefault(mod_folder, [])
mf_list.append((import_name, rel_path))
if not os.path.exists(out_dir):
os.makedirs(out_dir)
for package, package_path in package_folder:
- if '._' in package or 'test' in package:
+ if "._" in package or "test" in package:
continue
paths = []
@@ -242,60 +293,83 @@ def do_package(package_name):
continue
if not spackage.startswith(package):
continue
- if spackage.count('.') > package.count('.') + 1:
+ if spackage.count(".") > package.count(".") + 1:
continue
- if 'test' in spackage:
+ if "test" in spackage:
continue
- split_path = spackage.rsplit('.', 2)[-2:]
- if any(part[0] == '_' for part in split_path):
+ split_path = spackage.rsplit(".", 2)[-2:]
+ if any(part[0] == "_" for part in split_path):
continue
- paths.append(os.path.join(*split_path) + '.rst')
+ paths.append(os.path.join(*split_path) + ".rst")
- paths.extend(os.path.join(os.path.basename(os.path.dirname(path)),
- os.path.basename(path).split('.', 1)[0])
- for imp_name, path in module_folders.get(package, []))
+ paths.extend(
+ os.path.join(
+ os.path.basename(os.path.dirname(path)),
+ os.path.basename(path).split(".", 1)[0],
+ )
+ for imp_name, path in module_folders.get(package, [])
+ )
paths.sort()
+
+ # check for any modules to exclude
+ for exclude_module in exclude_modules:
+ if exclude_module[0] in paths:
+ autolog(
+ "Excluding module in package: {}".format(exclude_module[0])
+ )
+ paths.remove(exclude_module[0])
+
doc = auto_doc_package(package_path, package, root_package, paths)
- package_dir = out_dir + package.replace('.', os.path.sep)
+ package_dir = out_dir + package.replace(".", os.path.sep)
if not os.path.exists(package_dir):
- os.makedirs(out_dir + package.replace('.', os.path.sep))
+ os.makedirs(out_dir + package.replace(".", os.path.sep))
- out_path = package_dir + '.rst'
+ out_path = package_dir + ".rst"
if not os.path.exists(out_path):
- print('Creating non-existent document {} ...'.format(out_path))
- with open(out_path, 'w') as fh:
+ autolog("Creating {} ...".format(out_path))
+ with open(out_path, "w") as fh:
fh.write(doc)
else:
- with open(out_path, 'r') as fh:
- existing_content = ''.join(fh.readlines())
+ with open(out_path, "r") as fh:
+ existing_content = "".join(fh.readlines())
if doc != existing_content:
- print('Creating out of date document {} ...'.format(
- out_path))
- with open(out_path, 'w') as fh:
+ autolog("Creating {} ...".format(out_path))
+ with open(out_path, "w") as fh:
fh.write(doc)
for import_name, module_path in module_folders.get(package, []):
- doc = auto_doc_module(module_path, import_name, root_package)
- out_path = out_dir + import_name.replace('.', os.path.sep) + '.rst'
- if not os.path.exists(out_path):
- print('Creating non-existent document {} ...'.format(
- out_path))
- with open(out_path, 'w') as fh:
- fh.write(doc)
- else:
- with open(out_path, 'r') as fh:
- existing_content = ''.join(fh.readlines())
- if doc != existing_content:
- print('Creating out of date document {} ...'.format(
- out_path))
- with open(out_path, 'w') as fh:
- fh.write(doc)
+            # check for any modules to exclude; skip an excluded module
+            # entirely rather than generating its .rst stub
+            if any(
+                import_name == exclude_module[1]
+                for exclude_module in exclude_modules
+            ):
+                autolog("Excluding module file: {}".format(import_name))
+                continue
+
+            doc = auto_doc_module(module_path, import_name, root_package)
+            out_path = (
+                out_dir + import_name.replace(".", os.path.sep) + ".rst"
+            )
+            if not os.path.exists(out_path):
+                autolog("Creating {} ...".format(out_path))
+                with open(out_path, "w") as fh:
+                    fh.write(doc)
+            else:
+                with open(out_path, "r") as fh:
+                    existing_content = "".join(fh.readlines())
+                if doc != existing_content:
+                    autolog("Creating {} ...".format(out_path))
+                    with open(out_path, "w") as fh:
+                        fh.write(doc)
def setup(app):
- app.connect('builder-inited', auto_package_build)
- app.add_config_value('autopackage_name', None, 'env')
+ app.connect("builder-inited", auto_package_build)
+ app.add_config_value("autopackage_name", None, "env")
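A minimal sketch of the new exclusion hook added above: entries in
``exclude_modules`` are matched against each module's import name before its
``.rst`` stub is written (the loop below stands in for ``do_package``, and the
module names are illustrative)::

    import ntpath
    import os

    # The entry below is the real one from the diff; further entries would
    # follow the same (path, import name) shape.
    exclude_modules = [
        ("experimental/raster", "iris.experimental.raster")  # gdal conflicts
    ]

    def autolog(message):
        # Prefix each message with this script's file name.
        print("[{}] {}".format(ntpath.basename(__file__), message))

    for import_name in ["iris.analysis", "iris.experimental.raster"]:
        if any(import_name == excluded for _, excluded in exclude_modules):
            autolog("Excluding module file: {}".format(import_name))
            continue
        autolog("Creating generated/api/{}.rst ...".format(
            import_name.replace(".", os.path.sep)))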
diff --git a/docs/iris/src/whitepapers/change_management.rst b/docs/iris/src/techpapers/change_management.rst
similarity index 95%
rename from docs/iris/src/whitepapers/change_management.rst
rename to docs/iris/src/techpapers/change_management.rst
index b279c91b96..d09237a4bf 100644
--- a/docs/iris/src/whitepapers/change_management.rst
+++ b/docs/iris/src/techpapers/change_management.rst
@@ -1,3 +1,5 @@
+:orphan:
+
.. _change_management:
Change Management in Iris from the User's perspective
@@ -44,25 +46,28 @@ User Actions : How you should respond to changes and releases
Checklist :
* when a new **testing or candidate version** is announced
- if convenient, test your working legacy code against it and report any problems.
+
+ * if convenient, test your working legacy code against it and report any problems.
* when a new **minor version is released**
- * review the 'Whats New' documentation to see if it introduces any
- deprecations that may affect you.
- * run your working legacy code and check for any deprecation warnings,
- indicating that modifications may be necessary at some point
- * when convenient :
+ * review the 'What's New' documentation to see if it introduces any
+ deprecations that may affect you.
+ * run your working legacy code and check for any deprecation warnings,
+ indicating that modifications may be necessary at some point
+ * when convenient :
* review existing code for use of deprecated features
* rewrite code to replace deprecated features
* when a new major version is **announced**
- ensure your code runs, without producing deprecation warnings, in the
+
+ * ensure your code runs, without producing deprecation warnings, in the
previous minor release
* when a new major version is **released**
- check for new deprecation warnings, as for a minor release
+
+ * check for new deprecation warnings, as for a minor release
Details
@@ -81,6 +86,7 @@ Our practices are intended be compatible with the principles defined in the
`SemVer project
<http://semver.org/>`_ .
Key concepts covered here:
+
* :ref:`Release versions
`
* :ref:`Backwards compatibility `
* :ref:`Deprecation `
@@ -95,18 +101,18 @@ Backwards compatibility
usages unchanged (see :ref:`terminology ` below).
Minor releases may only include backwards-compatible changes.
-The following are examples of backward-compatible changes :
+The following are examples of backward-compatible changes:
* changes to documentation
* adding to a module : new submodules, functions, classes or properties
* adding to a class : new methods or properties
* adding to a function or method : new **optional** arguments or keywords
-The following are examples of **non-** backward-compatible changes :
+The following are examples of **non-** backward-compatible changes:
* removing (which includes *renaming*) any public module or submodule
* removing any public component : a module, class, method, function or
- data object property of a public API component
+ data object property of a public API component
* removing any property of a public object
* removing an argument or keyword from a method or function
* adding a required argument to a method or function
@@ -221,7 +227,7 @@ are :
* A non-zero "" denotes a bugfix version, thus a release "X.Y.0" may
be followed by "X.Y.1", "X.Y.2" etc, which *only* differ by containing
- bugfixes. Any bugfix release supercedes its predecessors, and does not
+ bugfixes. Any bugfix release supersedes its predecessors, and does not
change any (valid) APIs or behaviour : hence, it is always advised to
replace a given version with its latest bugfix successor, and there
should be no reason not to.
@@ -255,7 +261,7 @@ behaviour of existing code can only be made at a **major** release, i.e. when
"X.0" is released following the last previous "(X-1).Y.Z".
*Minor* releases, by contrast, consist of bugfixes, new features, and
-deprecations : Any valid exisiting code should be unaffected by these, so it
+deprecations : Any valid existing code should be unaffected by these, so it
will still run with the same results.
At a major release, only **deprecated** behaviours and APIs can be changed or
@@ -355,7 +361,7 @@ with the new release, which we obviously need to avoid.
* the user code usage is simply by calls to "iris.load"
* the change is not a bugfix, as the old way isn't actually "wrong"
* we don't want to add an extra keyword into all the relevant calls
- * we don't see a longterm future for the existing behaviour : we
+ * we don't see a long-term future for the existing behaviour : we
expect everyone to adopt the new interpretation, eventually
For changes of this sort, the release will define a new boolean property of the
@@ -421,7 +427,7 @@ At (major) release ".0...":
* If your code is explicitly turning the option off, it will continue
to work in the same way at this point, but obviously time is
- runnning out.
+ running out.
* If your code is still using the old behaviour and *not* setting the
control option at all, its behaviour might now have changed
diff --git a/docs/iris/src/whitepapers/index.rst b/docs/iris/src/techpapers/index.rst
similarity index 54%
rename from docs/iris/src/whitepapers/index.rst
rename to docs/iris/src/techpapers/index.rst
index dd0876d257..773c8f7059 100644
--- a/docs/iris/src/whitepapers/index.rst
+++ b/docs/iris/src/techpapers/index.rst
@@ -1,8 +1,9 @@
-.. _whitepapers_index:
+.. _techpapers_index:
+
+
+Iris Technical Papers
+=====================
-============================
-Iris technical 'Whitepapers'
-============================
Extra information on specific technical issues.
.. toctree::
diff --git a/docs/iris/src/whitepapers/missing_data_handling.rst b/docs/iris/src/techpapers/missing_data_handling.rst
similarity index 100%
rename from docs/iris/src/whitepapers/missing_data_handling.rst
rename to docs/iris/src/techpapers/missing_data_handling.rst
diff --git a/docs/iris/src/whitepapers/um_files_loading.rst b/docs/iris/src/techpapers/um_files_loading.rst
similarity index 99%
rename from docs/iris/src/whitepapers/um_files_loading.rst
rename to docs/iris/src/techpapers/um_files_loading.rst
index fd2d2a2341..d8c796b31f 100644
--- a/docs/iris/src/whitepapers/um_files_loading.rst
+++ b/docs/iris/src/techpapers/um_files_loading.rst
@@ -30,7 +30,7 @@ Notes:
#. Iris treats Fieldsfile data almost exactly as if it were PP -- i.e. it
treats each field's lookup table entry like a PP header.
-#. The Iris datamodel is based on
+#. The Iris data model is based on
`NetCDF CF conventions `_, so most of this can
also be seen as a metadata translation between PP and CF terms, but it is
easier to discuss in terms of Iris elements.
@@ -132,7 +132,7 @@ For an ordinary latitude-longitude grid, the cubes have coordinates called
``ZDX/Y + BDX/Y * (1 .. LBNPT/LBROW)`` (*except*, if BDX/BDY is zero, the
values are taken from the extra data vector X/Y, if present).
* If X/Y_LOWER_BOUNDS extra data is available, this appears as bounds values
- of the horizontal cooordinates.
+ of the horizontal coordinates.
For **rotated** latitude-longitude coordinates (as for LBCODE=101), the
horizontal coordinates differ only slightly --
diff --git a/docs/iris/src/userguide/citation.rst b/docs/iris/src/userguide/citation.rst
index 01b655574e..7ce0a8ffc0 100644
--- a/docs/iris/src/userguide/citation.rst
+++ b/docs/iris/src/userguide/citation.rst
@@ -23,7 +23,7 @@ For example::
*******************
-Downloaded Software
+Downloaded software
*******************
Suggested format::
@@ -36,7 +36,7 @@ For example::
********************
-Checked out Software
+Checked out software
********************
Suggested format::
diff --git a/docs/iris/src/userguide/code_maintenance.rst b/docs/iris/src/userguide/code_maintenance.rst
index 00ba30506c..d03808e18f 100644
--- a/docs/iris/src/userguide/code_maintenance.rst
+++ b/docs/iris/src/userguide/code_maintenance.rst
@@ -1,31 +1,31 @@
-Code Maintenance
+Code maintenance
================
From a user point of view "code maintenance" means ensuring that your existing
working code stays working, in the face of changes to Iris.
-Stability and Change
+Stability and change
---------------------
In practice, as Iris develops, most users will want to periodically upgrade
-their installed version to access new features or at least bugfixes.
+their installed version to access new features or at least bug fixes.
This is obvious if you are still developing other code that uses Iris, or using
code from other sources.
However, even if you have only legacy code that remains untouched, some code
-maintenance effort is probably still necessary :
+maintenance effort is probably still necessary:
* On the one hand, *in principle*, working code will go on working, as long
as you don't change anything else.
- * However, such "version statis" can easily become a growing burden, if you
- are simply waiting until an update becomes unavoidable : Often, that will
+ * However, such "version stasis" can easily become a growing burden, if you
+ are simply waiting until an update becomes unavoidable; often that will
eventually occur when you need to update some other software component,
for some completely unconnected reason.
-Principles of Change Management
+Principles of change management
-------------------------------
When you upgrade software to a new version, you often find that you need to
diff --git a/docs/iris/src/userguide/cube_maths.rst b/docs/iris/src/userguide/cube_maths.rst
index 8fe6eb12d5..0ac2b8da74 100644
--- a/docs/iris/src/userguide/cube_maths.rst
+++ b/docs/iris/src/userguide/cube_maths.rst
@@ -60,6 +60,10 @@ but with the data representing their difference:
Scalar coordinates:
forecast_reference_time: 1859-09-01 06:00:00
height: 1.5 m
+ Attributes:
+ Conventions: CF-1.5
+ Model scenario: E1
+ source: Data from Met Office Unified Model 6.05
.. note::
@@ -208,7 +212,7 @@ The result could now be plotted using the guidance provided in the
.. only:: html
A very similar example to this can be found in
- :doc:`/examples/Meteorology/deriving_phenomena`.
+ :ref:`sphx_glr_generated_gallery_meteorology_plot_deriving_phenomena.py`.
.. only:: latex
diff --git a/docs/iris/src/userguide/cube_statistics.rst b/docs/iris/src/userguide/cube_statistics.rst
index 3ca7d9a2e0..310551c76f 100644
--- a/docs/iris/src/userguide/cube_statistics.rst
+++ b/docs/iris/src/userguide/cube_statistics.rst
@@ -93,7 +93,8 @@ can be used instead of ``MEAN``, see :mod:`iris.analysis` for a full list
of currently supported operators.
For an example of using this functionality, the
-:ref:`Hovmoller diagram ` example found
+:ref:`sphx_glr_generated_gallery_meteorology_plot_hovmoller.py`
+example found
in the gallery takes a zonal mean of an ``XYT`` cube by using the
``collapsed`` method with ``latitude`` and ``iris.analysis.MEAN`` as arguments.
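As a quick sketch of that call pattern (assuming the standard ``air_temp.pp``
sample file is available)::

    import iris
    import iris.analysis

    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    # Collapse the latitude dimension with a mean, as described above.
    mean = cube.collapsed('latitude', iris.analysis.MEAN)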
@@ -147,7 +148,7 @@ These areas can now be passed to the ``collapsed`` method as weights:
Several examples of area averaging exist in the gallery which may be of interest,
including an example on taking a :ref:`global area-weighted mean
-`.
+`.
.. _cube-statistics-aggregated-by:
@@ -244,7 +245,7 @@ These two coordinates can now be used to aggregate by season and climate-year:
The primary change in the cube is that the cube's data has been
reduced in the 'time' dimension by aggregation (taking means, in this case).
-This has collected together all datapoints with the same values of season and
+This has collected together all data points with the same values of season and
season-year.
The results are now indexed by the 19 different possible values of season and
season-year in a new, reduced 'time' dimension.
diff --git a/docs/iris/src/userguide/end_of_userguide.rst b/docs/iris/src/userguide/end_of_userguide.rst
deleted file mode 100644
index c8f951a634..0000000000
--- a/docs/iris/src/userguide/end_of_userguide.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-End of the user guide
-=====================
-
-If this was your first time reading the user guide, we hope you found it enjoyable and informative.
-It is advised that you now go back to the :doc:`start ` and try experimenting with your own data.
-
-
-
-Iris gallery
-------------
-It can be very daunting to start coding a project from an empty file, that is why you will find many in-depth
-examples in the Iris gallery which can be used as a goal driven reference to producing your own visualisations.
-
-If you produce a visualisation which you think would add value to the gallery, please get in touch with us and
-we will consider including it as an example for all to benefit from.
diff --git a/docs/iris/src/userguide/index.rst b/docs/iris/src/userguide/index.rst
index 4fb7b62155..2a3b32fe11 100644
--- a/docs/iris/src/userguide/index.rst
+++ b/docs/iris/src/userguide/index.rst
@@ -1,11 +1,9 @@
.. _user_guide_index:
+.. _user_guide_introduction:
-===============
-Iris user guide
-===============
+Introduction
+============
-How to use the user guide
----------------------------
If you are reading this user guide for the first time it is strongly recommended that you read the user guide
fully before experimenting with your own data files.
@@ -18,24 +16,16 @@ links in order to understand the guide but they may serve as a useful reference
Since later pages depend on earlier ones, try reading this user guide sequentially using the ``next`` and ``previous`` links.
-User guide table of contents
--------------------------------
-
-.. toctree::
- :maxdepth: 2
- :numbered:
-
- iris_cubes.rst
- loading_iris_cubes.rst
- saving_iris_cubes.rst
- navigating_a_cube.rst
- subsetting_a_cube.rst
- real_and_lazy_data.rst
- plotting_a_cube.rst
- interpolation_and_regridding.rst
- merge_and_concat.rst
- cube_statistics.rst
- cube_maths.rst
- citation.rst
- code_maintenance.rst
- end_of_userguide.rst
+* :doc:`iris_cubes`
+* :doc:`loading_iris_cubes`
+* :doc:`saving_iris_cubes`
+* :doc:`navigating_a_cube`
+* :doc:`subsetting_a_cube`
+* :doc:`real_and_lazy_data`
+* :doc:`plotting_a_cube`
+* :doc:`interpolation_and_regridding`
+* :doc:`merge_and_concat`
+* :doc:`cube_statistics`
+* :doc:`cube_maths`
+* :doc:`citation`
+* :doc:`code_maintenance`
diff --git a/docs/iris/src/userguide/interpolation_and_regridding.rst b/docs/iris/src/userguide/interpolation_and_regridding.rst
index 565f9b61eb..65ac36eada 100644
--- a/docs/iris/src/userguide/interpolation_and_regridding.rst
+++ b/docs/iris/src/userguide/interpolation_and_regridding.rst
@@ -1,6 +1,5 @@
.. _interpolation_and_regridding:
-
.. testsetup:: *
import numpy as np
@@ -16,7 +15,7 @@ Iris provides powerful cube-aware interpolation and regridding functionality,
exposed through Iris cube methods. This functionality is provided by building
upon existing interpolation schemes implemented by SciPy.
-In Iris we refer to the avaliable types of interpolation and regridding as
+In Iris we refer to the available types of interpolation and regridding as
`schemes`. The following are the interpolation schemes that are currently
available in Iris:
diff --git a/docs/iris/src/userguide/iris_cubes.rst b/docs/iris/src/userguide/iris_cubes.rst
index dc423afba1..5929c402f2 100644
--- a/docs/iris/src/userguide/iris_cubes.rst
+++ b/docs/iris/src/userguide/iris_cubes.rst
@@ -1,13 +1,9 @@
-.. _user_guide_introduction:
-
-===================
-Introduction
-===================
-
.. _iris_data_structures:
+====================
Iris data structures
---------------------
+====================
+
The top level object in Iris is called a cube. A cube contains data and metadata about a phenomenon.
In Iris, a cube is an interpretation of the *Climate and Forecast (CF) Metadata Conventions* whose purpose is to:
@@ -33,6 +29,7 @@ by definition, its phenomenon.
* Each coordinate has a name and a unit.
* When a coordinate is added to a cube, the data dimensions that it represents are also provided.
+
* The shape of a coordinate is always the same as the shape of the associated data dimension(s) on the cube.
* A dimension not explicitly listed signifies that the coordinate is independent of that dimension.
* Each dimension of a coordinate must be mapped to a data dimension. The only coordinates with no mapping are
diff --git a/docs/iris/src/userguide/loading_iris_cubes.rst b/docs/iris/src/userguide/loading_iris_cubes.rst
index bf50acc614..006a919408 100644
--- a/docs/iris/src/userguide/loading_iris_cubes.rst
+++ b/docs/iris/src/userguide/loading_iris_cubes.rst
@@ -38,10 +38,12 @@ This shows that there were 2 cubes as a result of loading the file, they were:
``air_potential_temperature`` and ``surface_altitude``.
The ``surface_altitude`` cube was 2 dimensional with:
+
* the two dimensions have extents of 204 and 187 respectively and are
represented by the ``grid_latitude`` and ``grid_longitude`` coordinates.
The ``air_potential_temperature`` cubes were 4 dimensional with:
+
* the same length ``grid_latitude`` and ``grid_longitude`` dimensions as
``surface_altitide``
* a ``time`` dimension of length 3
@@ -267,7 +269,7 @@ boundary of a circular coordinate (this is often the meridian or the dateline /
antimeridian). An example use-case of this is to extract the entire Pacific Ocean
from a cube whose longitudes are bounded by the dateline.
-This functionality cannot be provided reliably using contraints. Instead you should use the
+This functionality cannot be provided reliably using constraints. Instead you should use the
functionality provided by :meth:`cube.intersection `
to extract this region.
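A short sketch of that approach (the longitude range is illustrative, assuming
a global cube such as the ``air_temp.pp`` sample)::

    import iris

    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    # Extract a Pacific-spanning region across the dateline; intersection
    # handles the wrap-around that a constraint cannot.
    pacific = cube.intersection(longitude=(120, 300))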
diff --git a/docs/iris/src/userguide/merge_and_concat.rst b/docs/iris/src/userguide/merge_and_concat.rst
index b742b3ef5f..0d844ac403 100644
--- a/docs/iris/src/userguide/merge_and_concat.rst
+++ b/docs/iris/src/userguide/merge_and_concat.rst
@@ -1,7 +1,7 @@
.. _merge_and_concat:
=====================
-Merge and Concatenate
+Merge and concatenate
=====================
We saw in the :doc:`loading_iris_cubes` chapter that Iris tries to load as few cubes as
diff --git a/docs/iris/src/userguide/navigating_a_cube.rst b/docs/iris/src/userguide/navigating_a_cube.rst
index 055617e047..581d1a67cf 100644
--- a/docs/iris/src/userguide/navigating_a_cube.rst
+++ b/docs/iris/src/userguide/navigating_a_cube.rst
@@ -229,7 +229,7 @@ by field basis *before* they are automatically merged together:
# Add our own realization coordinate if it doesn't already exist.
if not cube.coords('realization'):
realization = np.int32(filename[-6:-3])
- ensemble_coord = icoords.AuxCoord(realization, standard_name='realization')
+ ensemble_coord = icoords.AuxCoord(realization, standard_name='realization', units="1")
cube.add_aux_coord(ensemble_coord)
filename = iris.sample_data_path('GloSea4', '*.pp')
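The ``units="1"`` added above marks the realization coordinate as
dimensionless, which is the CF convention for ensemble member numbers. In
isolation, the construction looks like this (values illustrative)::

    import numpy as np
    import iris.coords as icoords

    realization = np.int32(7)
    ensemble_coord = icoords.AuxCoord(
        realization, standard_name='realization', units="1"
    )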
diff --git a/docs/iris/src/userguide/plotting_a_cube.rst b/docs/iris/src/userguide/plotting_a_cube.rst
index d82cbbb027..f646aa4b3e 100644
--- a/docs/iris/src/userguide/plotting_a_cube.rst
+++ b/docs/iris/src/userguide/plotting_a_cube.rst
@@ -190,7 +190,7 @@ and providing the label keyword to identify it.
Once all of the lines have been added the :func:`matplotlib.pyplot.legend`
function can be called to indicate that a legend is desired:
-.. plot:: ../example_code/General/lineplot_with_legend.py
+.. plot:: ../gallery_code/general/plot_lineplot_with_legend.py
:include-source:
This example of consecutive ``qplt.plot`` calls coupled with the
@@ -272,7 +272,7 @@ Brewer colour palettes
***********************
Iris includes colour specifications and designs developed by
-`Cynthia Brewer `_.
+`Cynthia Brewer `_.
These colour schemes are freely available under the following licence::
Apache-Style Software License for ColorBrewer software and ColorBrewer Color Schemes
@@ -298,7 +298,7 @@ For adding citations to Iris plots, see :ref:`brewer-cite` (below).
Available Brewer Schemes
========================
The following subset of Brewer palettes found at
-`colorbrewer.org <http://colorbrewer.org>`_ are available within Iris.
+`colorbrewer2.org <http://colorbrewer2.org/>`_ are available within Iris.
.. plot:: userguide/plotting_examples/brewer.py
diff --git a/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py b/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py
index 30a5fc4318..f3772328ab 100644
--- a/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py
+++ b/docs/iris/src/userguide/plotting_examples/1d_quickplot_simple.py
@@ -11,4 +11,5 @@
temperature_1d = temperature[5, :]
qplt.plot(temperature_1d)
+
plt.show()
diff --git a/docs/iris/src/userguide/plotting_examples/1d_simple.py b/docs/iris/src/userguide/plotting_examples/1d_simple.py
index b76752ac18..ea90faf402 100644
--- a/docs/iris/src/userguide/plotting_examples/1d_simple.py
+++ b/docs/iris/src/userguide/plotting_examples/1d_simple.py
@@ -11,4 +11,5 @@
temperature_1d = temperature[5, :]
iplt.plot(temperature_1d)
+
plt.show()
diff --git a/docs/iris/src/userguide/plotting_examples/1d_with_legend.py b/docs/iris/src/userguide/plotting_examples/1d_with_legend.py
index 1ee75e1ed9..26aeeef9a6 100644
--- a/docs/iris/src/userguide/plotting_examples/1d_with_legend.py
+++ b/docs/iris/src/userguide/plotting_examples/1d_with_legend.py
@@ -1,5 +1,4 @@
import matplotlib.pyplot as plt
-
import iris
import iris.plot as iplt
diff --git a/docs/iris/src/userguide/plotting_examples/brewer.py b/docs/iris/src/userguide/plotting_examples/brewer.py
index e4533a28f5..f2ede9f9bc 100644
--- a/docs/iris/src/userguide/plotting_examples/brewer.py
+++ b/docs/iris/src/userguide/plotting_examples/brewer.py
@@ -4,19 +4,26 @@
import iris.palette
-a = np.linspace(0, 1, 256).reshape(1, -1)
-a = np.vstack((a, a))
-
-maps = sorted(iris.palette.CMAP_BREWER)
-nmaps = len(maps)
-
-fig = plt.figure(figsize=(7, 10))
-fig.subplots_adjust(top=0.99, bottom=0.01, left=0.2, right=0.99)
-for i, m in enumerate(maps):
- ax = plt.subplot(nmaps, 1, i + 1)
- plt.axis("off")
- plt.imshow(a, aspect="auto", cmap=plt.get_cmap(m), origin="lower")
- pos = list(ax.get_position().bounds)
- fig.text(pos[0] - 0.01, pos[1], m, fontsize=8, horizontalalignment="right")
-
-plt.show()
+def main():
+ a = np.linspace(0, 1, 256).reshape(1, -1)
+ a = np.vstack((a, a))
+
+ maps = sorted(iris.palette.CMAP_BREWER)
+ nmaps = len(maps)
+
+ fig = plt.figure(figsize=(7, 10))
+ fig.subplots_adjust(top=0.99, bottom=0.01, left=0.2, right=0.99)
+ for i, m in enumerate(maps):
+ ax = plt.subplot(nmaps, 1, i + 1)
+ plt.axis("off")
+ plt.imshow(a, aspect="auto", cmap=plt.get_cmap(m), origin="lower")
+ pos = list(ax.get_position().bounds)
+ fig.text(
+ pos[0] - 0.01, pos[1], m, fontsize=8, horizontalalignment="right"
+ )
+
+ plt.show()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/iris/src/userguide/plotting_examples/cube_blockplot.py b/docs/iris/src/userguide/plotting_examples/cube_blockplot.py
index cd380f5e35..0961a97fdb 100644
--- a/docs/iris/src/userguide/plotting_examples/cube_blockplot.py
+++ b/docs/iris/src/userguide/plotting_examples/cube_blockplot.py
@@ -1,5 +1,4 @@
import matplotlib.pyplot as plt
-
import iris
import iris.quickplot as qplt
diff --git a/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py
index 6dce2b39de..45ba800485 100644
--- a/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py
+++ b/docs/iris/src/userguide/plotting_examples/cube_brewer_cite_contourf.py
@@ -1,5 +1,4 @@
import matplotlib.pyplot as plt
-
import iris
import iris.quickplot as qplt
import iris.plot as iplt
diff --git a/docs/iris/src/userguide/saving_iris_cubes.rst b/docs/iris/src/userguide/saving_iris_cubes.rst
index ecf2210810..fa67b6213d 100644
--- a/docs/iris/src/userguide/saving_iris_cubes.rst
+++ b/docs/iris/src/userguide/saving_iris_cubes.rst
@@ -6,8 +6,8 @@ Saving Iris cubes
Iris supports the saving of cubes and cube lists to:
-* CF netCDF (1.5)
-* GRIB (edition 2)
+* CF netCDF (version 1.6)
+* GRIB edition 2 (if `iris-grib <https://github.com/SciTools/iris-grib>`_ is installed)
* Met Office PP
@@ -57,7 +57,6 @@ The :py:func:`iris.save` function passes all other keywords through to the saver
See
* :py:func:`iris.fileformats.netcdf.save`
-* :py:func:`iris.fileformats.grib.save_grib2`
* :py:func:`iris.fileformats.pp.save`
for more details on supported arguments for the individual savers.
@@ -70,14 +69,14 @@ When saving to GRIB or PP, the save process may be intercepted between the trans
For example, a GRIB2 message with a particular known long_name may need to be saved to a specific parameter code and type of statistical process. This can be achieved by::
def tweaked_messages(cube):
- for cube, grib_message in iris.fileformats.grib.as_pairs(cube):
+ for cube, grib_message in iris_grib.save_pairs_from_cube(cube):
# post process the GRIB2 message, prior to saving
if cube.name() == 'carefully_customised_precipitation_amount':
gribapi.grib_set_long(grib_message, "typeOfStatisticalProcess", 1)
gribapi.grib_set_long(grib_message, "parameterCategory", 1)
gribapi.grib_set_long(grib_message, "parameterNumber", 1)
yield grib_message
- iris.fileformats.grib.save_messages(tweaked_messages(cubes[0]), '/tmp/agrib2.grib2')
+ iris_grib.save_messages(tweaked_messages(cubes[0]), '/tmp/agrib2.grib2')
Similarly a PP field may need to be written out with a specific value for LBEXP. This can be achieved by::
@@ -98,7 +97,7 @@ netCDF
NetCDF is a flexible container for metadata and cube metadata is closely related to the CF for netCDF semantics. This means that cube metadata is well represented in netCDF files, closely resembling the in memory metadata representation.
Thus there is no provision for similar save customisation functionality for netCDF saving, all customisations should be applied to the cube prior to saving to netCDF.
-Bespoke Saver
+Bespoke saver
--------------
A bespoke saver may be written to support an alternative file format. This can be provided to the :py:func:`iris.save` function, enabling Iris to write to a different file format.
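For completeness, a sketch of the full ``iris_grib`` round trip implied by the
revised example above (assumes ``iris-grib`` is installed and that ``cube`` is
a GRIB-translatable cube loaded elsewhere)::

    import iris_grib

    # cube is assumed to be an iris.cube.Cube obtained beforehand.
    pairs = iris_grib.save_pairs_from_cube(cube)
    iris_grib.save_messages(
        (message for _, message in pairs), '/tmp/agrib2.grib2'
    )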
diff --git a/docs/iris/src/whatsnew/1.0.rst b/docs/iris/src/whatsnew/1.0.rst
index 2a415c1bfe..79afd8cf1a 100644
--- a/docs/iris/src/whatsnew/1.0.rst
+++ b/docs/iris/src/whatsnew/1.0.rst
@@ -1,12 +1,15 @@
-What's new in Iris 1.0
-**********************
+v1.0 (17 Oct 2012)
+******************
-:Release: 1.0.0
-:Date: 15 Oct, 2012
-
-This document explains the new/changed features of Iris in version 1.0.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
With the release of Iris 1.0, we have broadly completed the transition
to the CF data model, and established a stable foundation for future
work. Following this release we plan to deliver significant performance
@@ -28,48 +31,44 @@ to formalise their data model reach maturity, they will be included
in Iris where significant backwards-compatibility can be maintained.
-Iris 1.0 features
-=================
+Features
+========
A summary of the main features added with version 1.0:
* Hybrid-pressure vertical coordinates, and the ability to load from GRIB.
+
* Initial support for CF-style coordinate systems.
+
* Use of Cartopy for mapping in matplotlib.
+
* Load data from NIMROD files.
+
* Availability of Cynthia Brewer colour palettes.
+
* Add a citation to a plot.
+
* Ensures netCDF files are properly closed.
+
* The ability to bypass merging when loading data.
+
* Save netCDF files with an unlimited dimension.
+
* A more explicit set of load functions, which also allow the automatic
cube merging to be bypassed as a last resort.
+
* The ability to project a cube with a lat-lon or rotated lat-lon coordinate
system into a range of map projections e.g. Polar Stereographic.
-
-Incompatible changes
---------------------
-* The "source" and "history" metadata are now represented as Cube
- attributes, where previously they used coordinates.
-* :meth:`iris.cube.Cube.coord_dims()` now returns a tuple instead of a list.
-* The ``iris.plot.gcm`` and ``iris.plot.map_setup`` functions are now removed.
- See :ref:`whats-new-cartopy` for further details.
-
-Deprecations
-------------
-* The methods :meth:`iris.coords.Coord.cos()` and
- :meth:`iris.coords.Coord.sin()` have been deprecated.
-* The :func:`iris.load_strict()` function has been deprecated. Code
- should now use the :func:`iris.load_cube()` and
- :func:`iris.load_cubes()` functions instead.
+* Cube summaries are now more readable when the scalar coordinates
+ contain bounds.
CF-netCDF coordinate systems
-============================
+----------------------------
The coordinate systems in Iris are now defined by the CF-netCDF
-`grid mappings `_.
+`grid mappings `_.
As of Iris 1.0 a subset of the CF-netCDF coordinate systems are
supported, but this will be expanded in subsequent versions. Adding
this code is a relatively simple, incremental process - it would make a
@@ -79,13 +78,13 @@ contributing to the project.
The coordinate systems available in Iris 1.0 and their corresponding
Iris classes are:
-================================================================================================== =========================================
-CF name Iris class
-================================================================================================== =========================================
-`Latitude-longitude `_ :class:`~iris.coord_systems.GeogCS`
-`Rotated pole `_ :class:`~iris.coord_systems.RotatedGeogCS`
-`Transverse Mercator `_ :class:`~iris.coord_systems.TransverseMercator`
-================================================================================================== =========================================
+================================================================================================================= =========================================
+CF name Iris class
+================================================================================================================= =========================================
+`Latitude-longitude `_ :class:`~iris.coord_systems.GeogCS`
+`Rotated pole `_ :class:`~iris.coord_systems.RotatedGeogCS`
+`Transverse Mercator `_ :class:`~iris.coord_systems.TransverseMercator`
+================================================================================================================= =========================================
For convenience, Iris also includes the :class:`~iris.coord_systems.OSGB`
class which provides a simple way to create the transverse Mercator
@@ -96,7 +95,7 @@ coordinate system used by the British
.. _whats-new-cartopy:
Using Cartopy for mapping in matplotlib
-=======================================
+---------------------------------------
The underlying map drawing package has now been updated to use
`Cartopy `_. Cartopy provides a
@@ -143,12 +142,11 @@ For more examples of what can be done with Cartopy, see the Iris gallery and
Hybrid-pressure
-===============
+---------------
With the introduction of the :class:`~iris.aux_factory.HybridPressureFactory`
class, it is now possible to represent data expressed on a
-hybrid-pressure vertical coordinate, as defined by the second variant in
-`Appendix D `_.
+`hybrid-pressure vertical coordinate `_.
A hybrid-pressure factory is created with references to the coordinates
which provide the components of the hybrid coordinate ("ap" and "b") and
the surface pressure. In return, it provides a virtual "pressure"
@@ -160,11 +158,11 @@ the derived "pressure" coordinate for certain data [#f1]_ from the
.. [#f1] Where the level type is either 105 or 119, and where the
surface pressure has an ECMWF paramId of
- `152 `_.
+ `152 <https://apps.ecmwf.int/codes/grib/param-db?id=152>`_.
NetCDF
-======
+------
When saving a Cube to a netCDF file, Iris will now define the outermost
dimension as an unlimited/record dimension. In combination with the
@@ -190,7 +188,7 @@ processes.
Brewer colour palettes
-======================
+----------------------
Iris includes a selection of carefully designed colour palettes produced
by Cynthia Brewer. The :mod:`iris.palette` module registers the Brewer
@@ -216,7 +214,7 @@ in the citation guidance provided by Cynthia Brewer.
Metadata attributes
-===================
+-------------------
Iris now stores "source" and "history" metadata in Cube attributes.
For example::
@@ -250,7 +248,7 @@ Where previously it would have appeared as::
New loading functions
-=====================
+---------------------
The main functions for loading cubes are now:
- :func:`iris.load()`
@@ -273,7 +271,7 @@ functions instead.
Cube projection
-===============
+---------------
Iris now has the ability to project a cube into a number of map projections.
This functionality is provided by :func:`iris.analysis.cartography.project()`.
@@ -310,7 +308,24 @@ preserved. This function currently assumes global data and will if
necessary extrapolate beyond the geographical extent of the source cube.
-Other changes
-=============
-* Cube summaries are now more readable when the scalar coordinates
- contain bounds.
+Incompatible changes
+====================
+
+* The "source" and "history" metadata are now represented as Cube
+ attributes, where previously they used coordinates.
+
+* :meth:`iris.cube.Cube.coord_dims()` now returns a tuple instead of a list.
+
+* The ``iris.plot.gcm`` and ``iris.plot.map_setup`` functions are now removed.
+ See :ref:`whats-new-cartopy` for further details.
+
+
+Deprecations
+============
+
+* The methods :meth:`iris.coords.Coord.cos()` and
+ :meth:`iris.coords.Coord.sin()` have been deprecated.
+
+* The :func:`iris.load_strict()` function has been deprecated. Code
+ should now use the :func:`iris.load_cube()` and
+ :func:`iris.load_cubes()` functions instead.
diff --git a/docs/iris/src/whatsnew/1.1.rst b/docs/iris/src/whatsnew/1.1.rst
index 274ec65ff6..ea85dbc42c 100644
--- a/docs/iris/src/whatsnew/1.1.rst
+++ b/docs/iris/src/whatsnew/1.1.rst
@@ -1,71 +1,64 @@
-What's new in Iris 1.1
-**********************
+v1.1 (03 Jan 2013)
+******************
-:Release: 1.1.0
-:Date: 7 Dec, 2012
-
-This document explains the new/changed features of Iris in version 1.1.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-With the release of Iris 1.1, we are introducing support for Mac OS X.
-Version 1.1 also sees the first batch of performance enhancements, with
-some notable improvements to netCDF/PP import.
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
-Iris 1.1 features
-=================
+Features
+========
-A summary of the main features added with version 1.1:
+With the release of Iris 1.1, we are introducing support for Mac OS X.
+Version 1.1 also sees the first batch of performance enhancements, with
+some notable improvements to netCDF/PP import.
* Support for Mac OS X.
+
* GRIB1 import now supports time units of "3 hours".
+
* Fieldsfile import now supports unpacked and "CRAY" 32-bit packed data
in 64-bit Fieldsfiles.
+
* PP file import now supports "CRAY" 32-bit packed data.
+
* Various performance improvements, particularly for netCDF import,
PP import, and constraints.
+
* GRIB2 export now supports level types of altitude and height
(codes 102 and 103).
+
* iris.analysis.cartography.area_weights now supports non-standard
dimension orders.
+
* PP file import now adds the "forecast_reference_time" for fields
where LBTIM is 11, 12, 13, 31, or 32.
+
* PP file import now supports LBTIM values of 1, 2, and 3.
+
* Fieldsfile import now has some support for ancillary files.
+
* Coordinate categorisation functions added for day-of-year and
user-defined seasons.
+
* GRIB2 import now has partial support for probability data defined
with product template 4.9.
-Bugs fixed
-----------
-* PP export no longer attempts to set/overwrite the STASH code based on
- the standard_name.
-* Cell comparisons now work consistently, which fixes a bug where
- bounded_cell > point_cell compares the point to the bounds but,
- point_cell < bounded_cell compares the points.
-* Fieldsfile import now correctly recognises pre v3.1 and post v5.2
- versions, which fixes a bug where the two were interchanged.
-* iris.analysis.trajectory.interpolate now handles hybrid-height.
-
-Incompatible changes
---------------------
-* N/A
-
-Deprecations
-------------
-* N/A
-
Coordinate categorisation
-=========================
+-------------------------
An :func:`~iris.coord_categorisation.add_day_of_year` categorisation
function has been added to the existing suite in
:mod:`iris.coord_categorisation`.
+
Custom seasons
---------------
+~~~~~~~~~~~~~~
The conventional seasonal categorisation functions have been
complemented by two groups of functions which handle user-defined,
@@ -97,3 +90,19 @@ The other custom season function is:
This function adds a coordinate containing True/False values determined
by membership of a single custom season.
+
+
+Bugs fixed
+==========
+
+* PP export no longer attempts to set/overwrite the STASH code based on
+ the standard_name.
+
+* Cell comparisons now work consistently, which fixes a bug where
+ bounded_cell > point_cell compares the point to the bounds, but
+ point_cell < bounded_cell compares the points.
+
+* Fieldsfile import now correctly recognises pre v3.1 and post v5.2
+ versions, which fixes a bug where the two were interchanged.
+
+* iris.analysis.trajectory.interpolate now handles hybrid-height.
diff --git a/docs/iris/src/whatsnew/1.10.rst b/docs/iris/src/whatsnew/1.10.rst
index 26f21c0252..b5dfc1974b 100644
--- a/docs/iris/src/whatsnew/1.10.rst
+++ b/docs/iris/src/whatsnew/1.10.rst
@@ -1,14 +1,18 @@
-What's New in Iris 1.10
-***********************
+v1.10 (05 Sep 2016)
+*********************
-:Release: 1.10
-:Date: 5th September 2016
-
-This document explains the new/changed features of Iris in version 1.10
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.10 Features
-==================
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
.. _iris_grib_added:
* Support has now been added for the
@@ -19,11 +23,11 @@ Iris 1.10 Features
iris module :mod:`iris.fileformats.grib`.
* The capabilities of ``iris_grib`` are essentially the same as the existing
- :mod:`iris.fileformats.grib` when used with ``iris.FUTURE.strict_grib_load=True``,
- with only small detail differences.
+ :mod:`iris.fileformats.grib` when used with
+ ``iris.FUTURE.strict_grib_load=True``, with only small detail differences.
- * The old :mod:`iris.fileformats.grib` module is now deprecated and may shortly be
- removed.
+ * The old :mod:`iris.fileformats.grib` module is now deprecated and may
+ shortly be removed.
* If you are already using the recommended :data:`iris.FUTURE` setting
``iris.FUTURE.strict_grib_load=True`` this should not cause problems, as
@@ -44,79 +48,204 @@ Iris 1.10 Features
any problems you uncover, such as files that will no longer load with the
new implementation.
-* :meth:`iris.experimental.regrid.PointInCell.regridder` now works across coordinate systems, including non latlon systems. Additionally, the requirement that the source data X and Y coordinates be 2D has been removed. NB: some aspects of this change are backwards incompatible.
-* Plotting non-Gregorian calendars is now supported. This adds `nc_time_axis <https://github.com/SciTools/nc-time-axis>`_ as a dependency.
-* Promoting a scalar coordinate to a dimension coordinate with :func:`iris.util.new_axis` no longer loads deferred data.
-* The parsing functionality for Cell Methods from netCDF files is available as part of the :mod:`iris.fileformats.netcdf` module as :func:`iris.fileformats.netcdf.parse_cell_methods`.
-* Support for the NameIII Version 2 file format has been added.
-* Loading netcdf data in Mercator and Stereographic projections now accepts optional extra projection parameter attributes (``false_easting``, ``false_northing`` and ``scale_factor_at_projection_origin``), if they match the default values.
+* :meth:`iris.experimental.regrid.PointInCell.regridder` now works across
+ coordinate systems, including non latlon systems. Additionally, the
+ requirement that the source data X and Y coordinates be 2D has been removed.
+ NB: some aspects of this change are backwards incompatible.
- * NetCDF files which define a Mercator projection where the ``false_easting``, ``false_northing`` and ``scale_factor_at_projection_origin`` match the defaults will have the projection loaded correctly. Otherwise, a warning will be issued for each parameter that does not match the default and the projection will not be loaded.
- * NetCDF files which define a Steroegraphic projection where the ``scale_factor_at_projection_origin`` is equal to 1.0 will have the projection loaded correctly. Otherwise, a warning will be issued and the projection will not be loaded.
+* Plotting non-Gregorian calendars is now supported. This adds
+ `nc_time_axis <https://github.com/SciTools/nc-time-axis>`_ as a dependency.
-* The :mod:`iris.plot` routines :func:`~iris.plot.contour`, :func:`~iris.plot.contourf`, :func:`~iris.plot.outline`, :func:`~iris.plot.pcolor`, :func:`~iris.plot.pcolormesh` and :func:`~iris.plot.points` now support plotting cubes with anonymous dimensions by specifying the *numeric index* of the anonymous dimension within the ``coords`` keyword argument.
+* Promoting a scalar coordinate to a dimension coordinate with
+ :func:`iris.util.new_axis` no longer loads deferred data.
+
+* The parsing functionality for Cell Methods from netCDF files is available
+ as part of the :mod:`iris.fileformats.netcdf` module as
+ :func:`iris.fileformats.netcdf.parse_cell_methods`.
+
+* Support for the NameIII Version 2 file format has been added.
+
+* Loading netcdf data in Mercator and Stereographic projections now accepts
+ optional extra projection parameter attributes (``false_easting``,
+ ``false_northing`` and ``scale_factor_at_projection_origin``), if they match
+ the default values.
+
+ * NetCDF files which define a Mercator projection where the
+ ``false_easting``, ``false_northing`` and
+ ``scale_factor_at_projection_origin`` match the defaults will have the
+ projection loaded correctly. Otherwise, a warning will be issued for each
+ parameter that does not match the default and the projection will not be
+ loaded.
+
+ * NetCDF files which define a Stereographic projection where the
+ ``scale_factor_at_projection_origin`` is equal to 1.0 will have the
+ projection loaded correctly. Otherwise, a warning will be issued and the
+ projection will not be loaded.
+
+* The :mod:`iris.plot` routines :func:`~iris.plot.contour`,
+ :func:`~iris.plot.contourf`, :func:`~iris.plot.outline`,
+ :func:`~iris.plot.pcolor`, :func:`~iris.plot.pcolormesh` and
+ :func:`~iris.plot.points` now support plotting cubes with anonymous
+ dimensions by specifying the *numeric index* of the anonymous dimension
+ within the ``coords`` keyword argument.
Note that the axis of the anonymous dimension will be plotted in index space.
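
  For example (a sketch, assuming a 2-D ``cube`` whose first dimension is
  anonymous)::

      import iris.plot as iplt

      # Use the numeric index 0 for the anonymous dimension; its axis
      # is plotted in index space.
      iplt.pcolormesh(cube, coords=['longitude', 0])
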
-* NetCDF loading and saving now supports Cubes that use the LambertConformal coordinate system.
-* The experimental structured Fieldsfile loader :func:`~iris.experimental.fieldsfile.load` has been extended to also load structured PP files.
+* NetCDF loading and saving now supports Cubes that use the LambertConformal
+ coordinate system.
- Structured loading is a streamlined operation, offering the benefit of a significantly faster loading alternative to the more generic :func:`iris.load` mechanism.
+* The experimental structured Fieldsfile loader
+ :func:`~iris.experimental.fieldsfile.load` has been extended to also load
+ structured PP files.
- Note that structured loading is not an optimised wholesale replacement of :func:`iris.load`. Structured loading is restricted to input containing contiguously ordered fields for each phenomenon that repeat regularly over the same vertical levels and times. For further details, see :func:`~iris.experimental.fieldsfile.load`
+ Structured loading is a streamlined operation, offering the benefit of a
+ significantly faster loading alternative to the more generic
+ :func:`iris.load` mechanism.
+
+ Note that structured loading is not an optimised wholesale replacement of
+ :func:`iris.load`. Structured loading is restricted to input containing
+ contiguously ordered fields for each phenomenon that repeat regularly over
+ the same vertical levels and times. For further details, see
+ :func:`~iris.experimental.fieldsfile.load`.
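
  For example (a sketch, with a hypothetical file name)::

      from iris.experimental.fieldsfile import load

      # Fast, structured loading of a PP file (or a Fieldsfile).
      cubes = load('my_structured_data.pp')
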
* :mod:`iris.experimental.regrid_conservative` is now compatible with ESMPy v7.
-* Saving zonal (i.e. longitudinal) means to PP files now sets the '64s' bit in LBPROC.
+
+* Saving zonal (i.e. longitudinal) means to PP files now sets the '64s' bit in
+ LBPROC.
+
* Loading of 'little-endian' PP files is now supported.
-* All appropriate :mod:`iris.plot` functions now handle an ``axes`` keyword, allowing use of the object oriented matplotlib interface rather than pyplot.
-* The ability to pass file format object lists into the rules based load pipeline, as used for GRIB, Fields Files and PP has been added. The :func:`iris.fileformats.pp.load_pairs_from_fields` and :func:`iris.fileformats.grib.load_pairs_from_fields` are provided to produce cubes from such lists. These lists may have been filtered or altered using the appropriate :mod:`iris.fileformats` modules.
-* Cubes can now have an 'hour' coordinate added with :meth:`iris.coord_categorisation.add_hour`.
-* Time coordinates from PP fields with an lbcode of the form 3xx23 are now correctly encoded with a 360-day calendar.
-* The loading from and saving to netCDF of CF cell_measure variables is supported, along with their representation within a Cube as :attr:`~iris.cube.Cube.cell_measures`.
-* Cubes with anonymous dimensions can now be concatenated. This can only occur along a dimension that is not anonymous.
-* NetCDF saving of ``valid_range``, ``valid_min`` and ``valid_max`` cube attributes is now allowed.
-
-Bugs Fixed
+
+* All appropriate :mod:`iris.plot` functions now handle an ``axes`` keyword,
+ allowing use of the object oriented matplotlib interface rather than pyplot.
+
+* The ability to pass file format object lists into the rules based load
+ pipeline, as used for GRIB, Fields Files and PP has been added. The
+ :func:`iris.fileformats.pp.load_pairs_from_fields` and
+ :func:`iris.fileformats.grib.load_pairs_from_fields` are provided to produce
+ cubes from such lists. These lists may have been filtered or altered using
+ the appropriate :mod:`iris.fileformats` modules.
+
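  For example (a sketch, with a hypothetical file name and STASH code)::

      import iris.fileformats.pp as pp

      # Filter the raw PP fields before converting them to cubes.
      fields = (field for field in pp.load('data.pp')
                if str(field.stash) == 'm01s16i203')
      cube_field_pairs = pp.load_pairs_from_fields(fields)
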
+* Cubes can now have an 'hour' coordinate added with
+ :meth:`iris.coord_categorisation.add_hour`.
+
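  For example (a sketch, assuming ``cube`` has a 'time' coordinate)::

      import iris.coord_categorisation

      # Adds a categorical 'hour' coordinate derived from 'time'.
      iris.coord_categorisation.add_hour(cube, 'time', name='hour')
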
+* Time coordinates from PP fields with an lbcode of the form 3xx23 are now
+ correctly encoded with a 360-day calendar.
+
+* The loading from and saving to netCDF of CF cell_measure variables is
+ supported, along with their representation within a Cube as
+ :attr:`~iris.cube.Cube.cell_measures`.
+
+* Cubes with anonymous dimensions can now be concatenated. This can only occur
+ along a dimension that is not anonymous.
+
+* NetCDF saving of ``valid_range``, ``valid_min`` and ``valid_max`` cube
+ attributes is now allowed.
+
+
+Bugs fixed
==========
-* Altered Cell Methods to display coordinate's standard_name rather than var_name where appropriate to avoid human confusion.
-* Saving multiple cubes with netCDF4 protected attributes should now work as expected.
-* Concatenating cubes with singleton dimensions (dimensions of size one) now works properly.
-* Fixed the ``grid_mapping_name`` and ``secant_latitudes`` handling for the LambertConformal coordinate system.
-* Fixed bug in :func:`iris.analysis.cartography.project` where the output projection coordinates didn't have units.
-* Attempting to use :meth:`iris.sample_data_path` to access a file that isn't actually Iris sample data now raises a more descriptive error. A note about the appropriate use of `sample_data_path` has also been added to the documentation.
-* Fixed a bug where regridding or interpolation with the :class:`~iris.analysis.Nearest` scheme returned floating-point results even when the source data was integer typed. It now always returns the same type as the source data.
-* Fixed a bug where regridding circular data would ignore any source masking. This affected any regridding using the :class:`~iris.analysis.Linear` and :class:`~iris.analysis.Nearest` schemes, and also :func:`iris.analysis.interpolate.linear`.
-* The ``coord_name`` parameter to :func:`~iris.fileformats.rules.scalar_cell_method` is now checked correctly.
-* LBPROC is set correctly when a cube containing the minimum of a variable is saved to a PP file. The IA component of LBTIM is set correctly when saving maximum or minimum values.
-* The performance of :meth:`iris.cube.Cube.extract` when a list of values is given to an instance of :class:`iris.Constraint` has been improved considerably.
-* Fixed a bug with :meth:`iris.cube.Cube.data` where an :class:`numpy.ndarray` was not being returned for scalar cubes with lazy data.
-* When saving in netcdf format, the units of 'latitude' and 'longitude' coordinates specified in 'degrees' are saved as 'degrees_north' and 'degrees_east' respectively, as defined in the CF conventions for netCDF files: sections 4.1 and 4.2.
-* Fixed a bug with a class of pp files with lbyr == 0, where the date would cause errors when converting to a datetime object (e.g. when printing a cube).
-
- When processing a pp field with lbtim = 2x, lbyr == lbyrd == 0 and lbmon == lbmond, 'month' and 'month_number' coordinates are created instead of 'time'.
-
-* Fixed a bug in :meth:`~iris.analysis.calculus.curl` where the sign of the r-component for spherical coordinates was opposite to what was expected.
+
+* Altered Cell Methods to display a coordinate's standard_name rather than
+ its var_name where appropriate, to avoid confusion.
+
+* Saving multiple cubes with netCDF4 protected attributes should now work as
+ expected.
+
+* Concatenating cubes with singleton dimensions (dimensions of size one) now
+ works properly.
+
+* Fixed the ``grid_mapping_name`` and ``secant_latitudes`` handling for the
+ LambertConformal coordinate system.
+
+* Fixed bug in :func:`iris.analysis.cartography.project` where the output
+ projection coordinates didn't have units.
+
+* Attempting to use :meth:`iris.sample_data_path` to access a file that isn't
+ actually Iris sample data now raises a more descriptive error. A note about
+ the appropriate use of ``sample_data_path`` has also been added to the
+ documentation.
+
+* Fixed a bug where regridding or interpolation with the
+ :class:`~iris.analysis.Nearest` scheme returned floating-point results even
+ when the source data was integer typed. It now always returns the same type
+ as the source data.
+
+* Fixed a bug where regridding circular data would ignore any source masking.
+ This affected any regridding using the :class:`~iris.analysis.Linear` and
+ :class:`~iris.analysis.Nearest` schemes, and also
+ :func:`iris.analysis.interpolate.linear`.
+
+* The ``coord_name`` parameter to
+ :func:`~iris.fileformats.rules.scalar_cell_method` is now checked correctly.
+
+* LBPROC is set correctly when a cube containing the minimum of a variable is
+ saved to a PP file. The IA component of LBTIM is set correctly when saving
+ maximum or minimum values.
+
+* The performance of :meth:`iris.cube.Cube.extract` when a list of values is
+ given to an instance of :class:`iris.Constraint` has been improved
+ considerably.
+
+* Fixed a bug with :meth:`iris.cube.Cube.data` where an :class:`numpy.ndarray`
+ was not being returned for scalar cubes with lazy data.
+
+* When saving in netcdf format, the units of 'latitude' and 'longitude'
+ coordinates specified in 'degrees' are saved as 'degrees_north' and
+ 'degrees_east' respectively, as defined in the CF conventions for netCDF
+ files: sections 4.1 and 4.2.
+
+* Fixed a bug with a class of pp files with lbyr == 0, where the date would
+ cause errors when converting to a datetime object (e.g. when printing a cube).
+
+ When processing a pp field with lbtim = 2x, lbyr == lbyrd == 0 and
+ lbmon == lbmond, 'month' and 'month_number' coordinates are created instead
+ of 'time'.
+
+* Fixed a bug in :meth:`~iris.analysis.calculus.curl` where the sign of the
+ r-component for spherical coordinates was opposite to what was expected.
+
* A bug that prevented cube printing in some cases has been fixed.
-* Fixed a bug where a deepcopy of a :class:`~iris.coords.DimCoord` would have writable ``points`` and ``bounds`` arrays. These arrays can now no longer be modified in-place.
-* Concatenation no longer occurs when the auxiliary coordinates of the cubes do not match. This check is not applied to AuxCoords that span the dimension the concatenation is occuring along. This behaviour can be switched off by setting the ``check_aux_coords`` kwarg in :meth:`iris.cube.CubeList.concatenate` to False.
-* Fixed a bug in :meth:`iris.cube.Cube.subset` where an exception would be thrown while trying to subset over a non-dimensional scalar coordinate.
-Incompatible Changes
+* Fixed a bug where a deepcopy of a :class:`~iris.coords.DimCoord` would have
+ writeable ``points`` and ``bounds`` arrays. These arrays can now no longer be
+ modified in-place.
+
+* Concatenation no longer occurs when the auxiliary coordinates of the cubes do
+ not match. This check is not applied to AuxCoords that span the dimension the
+ concatenation is occurring along. This behaviour can be switched off by
+ setting the ``check_aux_coords`` kwarg in
+ :meth:`iris.cube.CubeList.concatenate` to False.
+
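  For example (a sketch, assuming ``cubes`` is an
  :class:`iris.cube.CubeList` of compatible cubes)::

      # Skip the new matching check on auxiliary coordinates.
      result = cubes.concatenate(check_aux_coords=False)
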
+* Fixed a bug in :meth:`iris.cube.Cube.subset` where an exception would be
+ thrown while trying to subset over a non-dimensional scalar coordinate.
+
+
+Incompatible changes
====================
-* The source and target for :meth:`iris.experimental.regrid.PointInCell.regridder` must now have defined coordinate systems (i.e. not ``None``). Additionally, the source data X and Y coordinates must have the same cube dimensions.
+
+* The source and target for
+ :meth:`iris.experimental.regrid.PointInCell.regridder` must now have defined
+ coordinate systems (i.e. not ``None``). Additionally, the source data X and Y
+ coordinates must have the same cube dimensions.
+
Deprecations
============
+
* Deprecated the :class:`iris.Future` option
``iris.FUTURE.strict_grib_load``.
This only affected the module :mod:`iris.fileformats.grib`, which is itself
now deprecated.
Please see :ref:`iris_grib package `, above.
+
* Deprecated the module :mod:`iris.fileformats.grib`. The new package
+ `iris_grib <https://github.com/SciTools/iris-grib>`_ replaces this
- fuctionality, which will shortly be removed.
+ functionality, which will shortly be removed.
Please see :ref:`iris_grib package `, above.
-* The use of :data:`iris.config.SAMPLE_DATA_DIR` has been deprecated and replaced by the now importable `iris_sample_data `_ package.
+
+* The use of :data:`iris.config.SAMPLE_DATA_DIR` has been deprecated and
+ replaced by the now importable
+ `iris_sample_data <https://github.com/SciTools/iris-sample-data>`_ package.
* Deprecated the module :mod:`iris.analysis.interpolate`.
This contains the following public items, all of which are now deprecated and
@@ -132,21 +261,38 @@ Deprecations
Please use the replacement facilities individually noted in the module
documentation for :mod:`iris.analysis.interpolate`.
+
* The method :meth:`iris.cube.Cube.regridded` has been deprecated.
Please use :meth:`iris.cube.Cube.regrid` instead (see
:meth:`~iris.cube.Cube.regridded` for details).
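
  For example (a sketch, assuming a suitable ``target_grid`` cube)::

      import iris.analysis

      # The regridding scheme is now given explicitly.
      result = cube.regrid(target_grid, iris.analysis.Linear())
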
-* Deprecated :data:`iris.fileformats.grib.hindcast_workaround` and :class:`iris.fileformats.grib.GribWrapper`. The class :class:`iris.fileformats.grib.message.GribMessage` provides alternative means of working with GRIB message instances.
+
+* Deprecated :data:`iris.fileformats.grib.hindcast_workaround` and
+ :class:`iris.fileformats.grib.GribWrapper`. The class
+ :class:`iris.fileformats.grib.message.GribMessage` provides alternative means
+ of working with GRIB message instances.
+
* Deprecated the module :mod:`iris.fileformats.ff`. Please use the replacement
facilities in the :mod:`iris.fileformats.um` module:
- * :func:`iris.fileformats.um.um_to_pp` replaces :class:`iris.fileformats.ff.FF2PP`.
- * :func:`iris.fileformats.um.load_cubes` replaces :func:`iris.fileformats.ff.load_cubes`.
- * :func:`iris.fileformats.um.load_cubes_32bit_ieee` replaces :func:`iris.fileformats.ff.load_cubes_32bit_ieee`.
+ * :func:`iris.fileformats.um.um_to_pp` replaces
+ :class:`iris.fileformats.ff.FF2PP`.
+ * :func:`iris.fileformats.um.load_cubes` replaces
+ :func:`iris.fileformats.ff.load_cubes`.
+ * :func:`iris.fileformats.um.load_cubes_32bit_ieee` replaces
+ :func:`iris.fileformats.ff.load_cubes_32bit_ieee`.
+
+ All other public components are generally deprecated and will be removed in a
+ future release.
+
+* The functions :func:`iris.fileformats.pp.as_pairs` and
+ :func:`iris.fileformats.grib.as_pairs` are deprecated. They are replaced
+ by :func:`iris.fileformats.pp.save_pairs_from_cube` and
+ :func:`iris.fileformats.grib.save_pairs_from_cube`.
- All other public components are generally deprecated and will be removed in a future release.
+* ``iris.fileformats.pp_packing`` has been deprecated. Please install the
+ separate `mo_pack <https://github.com/SciTools/mo_pack>`_ package instead.
+ This provides the same functionality.
-* The :func:`iris.fileformats.pp.as_pairs` and :func:`iris.fileformats.grib.as_pairs` are deprecated. These are replaced with :func:`iris.fileformats.pp.save_pairs_from_cube` and :func:`iris.fileformats.grib.save_pairs_from_cube`.
-* ``iris.fileformats.pp_packing`` has been deprecated. Please install the separate `mo_pack `_ package instead. This provides the same functionality.
* Deprecated logging functions (currently used only for rules logging):
:data:`iris.config.RULE_LOG_DIR`,
:data:`iris.config.RULE_LOG_IGNORE` and
@@ -163,14 +309,37 @@ Deprecations
:class:`iris.fileformats.rules.RulesContainer` and
:func:`iris.fileformats.rules.calculate_forecast_period`.
-* Deprecated the custom pp save rules mechanism implemented by the functions :func:`iris.fileformats.pp.add_save_rules` and :func:`iris.fileformats.pp.reset_save_rules`. The functions :func:`iris.fileformats.pp.as_fields`, :func:`iris.fileformats.pp.as_pairs` and :func:`iris.fileformats.pp.save_fields` provide alternative means of achieving the same ends.
+* Deprecated the custom pp save rules mechanism implemented by the functions
+ :func:`iris.fileformats.pp.add_save_rules` and
+ :func:`iris.fileformats.pp.reset_save_rules`. The functions
+ :func:`iris.fileformats.pp.as_fields`, :func:`iris.fileformats.pp.as_pairs`
+ and :func:`iris.fileformats.pp.save_fields` provide alternative means of
+ achieving the same ends.
+
+
+Documentation
+=============
+
+* The documentation for :mod:`iris.cube` now makes clear that repeated
+ values will form a group under :meth:`iris.cube.Cube.aggregated_by` even
+ when they are not consecutive.
+
+* The documentation for :meth:`iris.analysis.calculus.curl` has been updated
+ for clarity.
+
+* False claims about :meth:`iris.fileformats.pp.save`,
+ :meth:`iris.fileformats.pp.as_pairs`, and
+ :meth:`iris.fileformats.pp.as_fields` being able to take instances of
+ :class:`iris.cube.CubeList` as inputs have been removed.
+
+* A new code example
+ :ref:`sphx_glr_generated_gallery_meteorology_plot_wind_speed.py`,
+ demonstrating the use of a quiver plot to display wind speeds over Lake
+ Victoria, has been added.
+
+* The docstring for :data:`iris.analysis.SUM` has been updated to explicitly
+ state that weights passed to it aren't normalised internally.
-Documentation Changes
-=====================
-* It is now clear that repeated values will form a group under :meth:`iris.cube.Cube.aggregated_by` even if they aren't consecutive. Hence, the documentation for :mod:`iris.cube` has been changed to reflect this.
-* The documentation for :meth:`iris.analysis.calculus.curl` has been updated for clarity.
-* False claims about :meth:`iris.fileformats.pp.save`, :meth:`iris.fileformats.pp.as_pairs`, and :meth:`iris.fileformats.pp.as_fields` being able to take instances of :class:`iris.cube.CubeList` as inputs have been removed.
-* A :doc:`new code example <../examples/Meteorology/wind_speed>`, demonstrating the use of a quiver plot to display wind speeds over Lake Victoria, has been added.
-* The docstring for :data:`iris.analysis.SUM` has been updated to explicitly state that weights passed to it aren't normalised internally.
-* A note regarding the impossibility of partially collapsing multi-dimensional coordinates has been added to the user guide.
+* A note regarding the impossibility of partially collapsing multi-dimensional
+ coordinates has been added to the user guide.
diff --git a/docs/iris/src/whatsnew/1.11.rst b/docs/iris/src/whatsnew/1.11.rst
index eb93ec2f8c..d04355b800 100644
--- a/docs/iris/src/whatsnew/1.11.rst
+++ b/docs/iris/src/whatsnew/1.11.rst
@@ -1,31 +1,45 @@
-What's New in Iris 1.11
-***********************
+v1.11 (29 Oct 2016)
+*********************
-:Release: 1.11
-:Date: 2016-11-28
-
-This document explains the new/changed features of Iris in version 1.11
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.11 Features
-==================
-* If available, display the ``STASH`` code instead of ``unknown / (unknown)`` when printing cubes
- with no ``standard_name`` and no ``units``.
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
+* If available, display the ``STASH`` code instead of ``unknown / (unknown)``
+ when printing cubes with no ``standard_name`` and no ``units``.
+
* Support for saving to netCDF with data packing has been added.
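
  For example (a sketch of one possible call, assuming the ``packing``
  keyword of the netCDF saver; the file name is hypothetical)::

      import iris

      # Pack the data as 16-bit integers; scale_factor and add_offset
      # are computed automatically from the data range.
      iris.save(cube, 'packed.nc', packing='i2')
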
-* The coordinate system :class:`iris.coord_systems.LambertAzimuthalEqualArea` has been added with NetCDF saving support.
-Bugs Fixed
+* The coordinate system :class:`iris.coord_systems.LambertAzimuthalEqualArea`
+ has been added with NetCDF saving support.
+
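  For example (a sketch of constructing the new coordinate system; the
  parameter values here are purely illustrative)::

      from iris.coord_systems import GeogCS, LambertAzimuthalEqualArea

      crs = LambertAzimuthalEqualArea(
          latitude_of_projection_origin=52.0,
          longitude_of_projection_origin=10.0,
          ellipsoid=GeogCS(6371229.0))
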
+Bugs fixed
==========
-* Fixed a floating point tolerance bug in :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`
+
+* Fixed a floating point tolerance bug in
+ :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`
for wrapped longitudes.
-* Allow :func:`iris.util.new_axis` to promote the nominated scalar coordinate of a cube
- with a scalar masked constant data payload.
-* Fixed a bug where :func:`iris.util._is_circular` would erroneously return false
- when coordinate values are decreasing.
-* When saving to NetCDF, the existing behaviour of writing string attributes as ASCII has been
- maintained across known versions of netCDF4-python.
-
-Documentation Changes
-=====================
+
+* Allow :func:`iris.util.new_axis` to promote the nominated scalar coordinate
+ of a cube with a scalar masked constant data payload.
+
+* Fixed a bug where :func:`iris.util._is_circular` would erroneously return
+ false when coordinate values are decreasing.
+
+* When saving to NetCDF, the existing behaviour of writing string attributes
+ as ASCII has been maintained across known versions of netCDF4-python.
+
+
+Documentation
+=============
+
* Fuller doc-string detail added to :func:`iris.analysis.cartography.unrotate_pole` and
:func:`iris.analysis.cartography.rotate_pole`.
diff --git a/docs/iris/src/whatsnew/1.12.rst b/docs/iris/src/whatsnew/1.12.rst
index 59ea47d876..1d7fc8f978 100644
--- a/docs/iris/src/whatsnew/1.12.rst
+++ b/docs/iris/src/whatsnew/1.12.rst
@@ -1,14 +1,18 @@
-What's New in Iris 1.12
-***********************
+v1.12 (31 Jan 2017)
+*********************
-:Release: 1.12
-:Date: 2017-01-30
-
-This document explains the new/changed features of Iris in version 1.12
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.12 Features
-==================
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
.. _showcase:
.. admonition:: Showcase Feature: New regridding schemes
@@ -121,11 +125,13 @@ Iris 1.12 Features
Deprecations
============
+
* The module :mod:`iris.experimental.fieldsfile` has been deprecated, in favour
of the new fast-loading mechanism provided by
:meth:`iris.fileformats.um.structured_um_loading`.
-Documentation Changes
-=====================
+Documentation
+=============
+
* Corrected documentation of :class:`iris.analysis.AreaWeighted` scheme to make
the usage scope clearer.
diff --git a/docs/iris/src/whatsnew/1.13.rst b/docs/iris/src/whatsnew/1.13.rst
index 532c160f13..30b3731d96 100644
--- a/docs/iris/src/whatsnew/1.13.rst
+++ b/docs/iris/src/whatsnew/1.13.rst
@@ -1,37 +1,78 @@
-What's New in Iris 1.13
-***********************
+v1.13 (17 May 2017)
+*************************
-:Release: 1.13
-:Date: 2017-05-17
+This document explains the changes made to Iris for this release
+(:doc:`View all changes <index>`.)
-This document explains the new/changed features of Iris in version 1.13
-(:doc:`View all changes `.)
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
-Iris 1.13 Features
-==================
+* Allow the reading of NAME trajectories stored by time instead of by particle
+ number.
-* Allow the reading of NAME trajectories stored by time instead of by particle number.
* An experimental link to python-stratify via :mod:`iris.experimental.stratify`.
-* Data arrays may be shared between cubes, and subsets of cubes, by using the :meth:`iris.cube.share_data` flag.
+* Data arrays may be shared between cubes, and subsets of cubes, by using the
+ :meth:`iris.cube.share_data` flag.
-Bug Fixes
+
+Bug fixes
=========
-* The bounds are now set correctly on the longitude coordinate if a zonal mean diagnostic has been loaded from a PP file as per the CF Standard.
-* NetCDF loading will now determine whether there is a string-valued scalar label, i.e. a character variable that only has one dimension (the length of the string), and interpret this correctly.
-* A line plot of geographic coordinates (e.g. drawing a trajectory) wraps around the edge of the map cleanly, rather than plotting a segment straight across the map.
-* When saving to PP, lazy data is preserved when generating PP fields from cubes so that a list of cubes can be saved to PP without excessive memory requirements.
-* An error is now correctly raised if a user tries to perform an arithmetic operation on two cubes with mismatching coordinates. Previously these cases were caught by the add and subtract operators, and now it is also caught by the multiply and divide operators.
-* Limited area Rotated Pole datasets where the data range is ``0 <= lambda < 360``, for example as produced in New Zealand, are plotted over a sensible map extent by default.
-* Removed the potential for a RuntimeWarning: overflow encountered in ``int_scalars`` which was missed during collapsed calculations. This could trip up unwary users of limited data types, such as int32 for very large numbers (e.g. seconds since 1970).
-* The CF conventions state that certain ``formula_terms`` terms may be omitted and assumed to be zero (http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#dimensionless-v-coord) so Iris now allows factories to be constructed with missing terms.
-* In the User Guide's contour plot example, clabel inline is set to be False so that it renders correctly, avoiding spurious horizontal lines across plots, although this does make labels a little harder to see.
-* The computation of area weights has been changed to a more numerically stable form. The previous form converted latitude to colatitude and used difference of cosines in the cell area computation. This formulation uses latitude and difference of sines. The conversion from latitude to colatitude at lower precision causes errors when computing the cell areas.
+* The bounds are now set correctly on the longitude coordinate if a zonal mean
+ diagnostic has been loaded from a PP file as per the CF Standard.
+
+* NetCDF loading will now determine whether there is a string-valued scalar
+ label, i.e. a character variable that only has one dimension (the length of
+ the string), and interpret this correctly.
+
+* A line plot of geographic coordinates (e.g. drawing a trajectory) wraps
+ around the edge of the map cleanly, rather than plotting a segment straight
+ across the map.
+
+* When saving to PP, lazy data is preserved when generating PP fields from
+ cubes so that a list of cubes can be saved to PP without excessive memory
+ requirements.
+
+* An error is now correctly raised if a user tries to perform an arithmetic
+ operation on two cubes with mismatching coordinates. Previously such cases
+ were caught only by the add and subtract operators; they are now also
+ caught by the multiply and divide operators.
+
+* Limited area Rotated Pole datasets where the data range is
+ ``0 <= lambda < 360``, for example as produced in New Zealand, are plotted
+ over a sensible map extent by default.
+
+* Removed the potential for a ``RuntimeWarning: overflow encountered in
+ int_scalars``, which could arise unnoticed during collapsed calculations.
+ This could trip up unwary users of limited data types, such as int32 for
+ very large numbers (e.g. seconds since 1970).
+
+* The CF conventions state that certain ``formula_terms`` terms may be
+ omitted and assumed to be zero
+ (http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#dimensionless-v-coord),
+ so Iris now allows factories to be constructed with missing terms.
+
+* In the User Guide's contour plot example, ``clabel`` is now called with
+ ``inline=False`` so that it renders correctly, avoiding spurious horizontal
+ lines across plots, although this does make labels a little harder to see.
+
+* The computation of area weights has been changed to a more numerically
+ stable form. The previous form converted latitude to colatitude and used
+ difference of cosines in the cell area computation. This formulation uses
+ latitude and difference of sines. The conversion from latitude to colatitude
+ at lower precision causes errors when computing the cell areas.
+
Testing
=======
-* Iris has adopted conda-forge to provide environments for continuous integration testing.
+* Iris has adopted conda-forge to provide environments for continuous
+ integration testing.
diff --git a/docs/iris/src/whatsnew/1.2.rst b/docs/iris/src/whatsnew/1.2.rst
index 720ae73376..982a68add6 100644
--- a/docs/iris/src/whatsnew/1.2.rst
+++ b/docs/iris/src/whatsnew/1.2.rst
@@ -1,16 +1,17 @@
-What's new in Iris 1.2
-**********************
+v1.2 (28 Feb 2013)
+******************
-:Release: 1.2.0
-:Date: 7th March 2013
-
-This document explains the new/changed features of Iris in version 1.2.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.2 features
-=================
-A summary of the main features added with version 1.2:
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
* :meth:`iris.cube.Cube.convert_units()` and
:meth:`iris.coords.Coord.convert_units()` have been added. This is
@@ -18,6 +19,7 @@ A summary of the main features added with version 1.2:
another. For example, to convert a cube in kelvin to celsius, one can now
call cube.convert_units('celsius'). The operation is in-place and if the
units are not convertible an exception will be raised.
+
* :attr:`iris.cube.Cube.var_name`, :attr:`iris.coords.Coord.var_name` and
:attr:`iris.aux_factory.AuxCoordFactory.var_name` attributes have been added.
This attribute represents the CF variable name of the object. It is populated
@@ -25,42 +27,57 @@ A summary of the main features added with version 1.2:
var_name keyword argument has also been added to the
:meth:`iris.cube.Cube.coord()`, :meth:`iris.cube.Cube.coords()` and
:meth:`iris.cube.Cube.aux_factory()` methods.
+
* :meth:`iris.coords.Coord.is_compatible()` has been added. This method is
used to determine whether two coordinates are sufficiently alike to
allow operations such as :meth:`iris.coords.Coord.intersect()` and
:func:`iris.analysis.interpolate.regrid()` to take place. A corresponding
method for cubes, :meth:`iris.cube.Cube.is_compatible()`, has also been
added.
+
* Printing a :class:`~iris.cube.Cube` is now more user friendly with regards
to dates and time. All *time* and *forecast_reference_time* scalar coordinates
now display human readable date/time information.
+
* The units of a :class:`~iris.cube.Cube` are now shown when it is printed.
+
* The area weights calculated by :func:`iris.analysis.cartography.area_weights`
may now be normalised relative to the total grid area.
-* Weights may now be passed to :meth:`iris.cube.Cube.rolling_window` aggregations,
- thus allowing arbitrary digital filters to be applied to a :class:`~iris.cube.Cube`.
+
+* Weights may now be passed to :meth:`iris.cube.Cube.rolling_window`
+ aggregations, thus allowing arbitrary digital filters to be applied to a
+ :class:`~iris.cube.Cube`.
+
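  For example, a simple three-point smoothing filter (a sketch, assuming
  ``cube`` has a 'time' coordinate)::

      import iris.analysis

      # The weights turn a rolling-window mean into a digital filter.
      smoothed = cube.rolling_window('time', iris.analysis.MEAN, 3,
                                     weights=[0.25, 0.5, 0.25])
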
Bugs fixed
-----------
+==========
+
* The GRIB hindcast interpretation of negative forecast times can be enabled
via the :data:`iris.fileformats.grib.hindcast_workaround` flag.
+
* The NIMROD file loader has been extended to cope with orography vertical
coordinates.
+
Incompatible changes
---------------------
+====================
+
* The deprecated :attr:`iris.cube.Cube.unit` and :attr:`iris.coords.Coord.unit`
attributes have been removed.
+
Deprecations
-------------
+============
+
* The :meth:`iris.coords.Coord.unit_converted()` method has been deprecated.
Users should make a copy of the coordinate using
:meth:`iris.coords.Coord.copy()` and then call the
:meth:`iris.coords.Coord.convert_units()` method of the new coordinate.
+
* With the addition of the var_name attribute the signatures of DimCoord and
AuxCoord have changed. This should have no impact if you are providing
parameters as keyword arguments, but it may cause issues if you are relying
on the position/order of the arguments.
+
* Iteration over a :class:`~iris.cube.Cube` has been deprecated. Instead,
users should use :meth:`iris.cube.Cube.slices`.
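
  For example (a sketch, assuming ``cube`` has latitude and longitude
  coordinates)::

      # Iterate over 2-D latitude/longitude slices instead of
      # iterating over the cube itself.
      for sub_cube in cube.slices(['latitude', 'longitude']):
          print(sub_cube)
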
diff --git a/docs/iris/src/whatsnew/1.3.rst b/docs/iris/src/whatsnew/1.3.rst
index dbea08ad03..fd6f2cfef9 100644
--- a/docs/iris/src/whatsnew/1.3.rst
+++ b/docs/iris/src/whatsnew/1.3.rst
@@ -1,65 +1,42 @@
-What's new in Iris 1.3
-**********************
+v1.3 (27 Mar 2013)
+******************
-:Release: 1.3.0
-:Date: 27 March 2013
-
-This document explains the new/changed features of Iris in version 1.3.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.3 features
-=================
-A summary of the main features added with version 1.3:
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
* Experimental support for
  :ref:`loading ABF/ABL files <whats-new-abf>`.
+
* Support in :func:`iris.analysis.interpolate.linear` for longitude ranges
other than [-180, 180].
+
* Support for :ref:`customised CF profiles <whats-new-cf-profile>` on
export to netCDF.
+
* The documentation now includes guidance on
:ref:`how to cite Iris`.
+
* The ability to calculate the exponential of a Cube, via
:func:`iris.analysis.maths.exp()`.
+
* Experimental support for :ref:`concatenating Cubes <whats-new-concat>`
along existing dimensions via
:func:`iris.experimental.concatenate.concatenate()`.
-Bugs fixed
-----------
-* Printing a Cube now supports Unicode attribute values.
-* PP export now sets LBMIN correctly.
-* Converting between reference times now works correctly for
- units with non-Gregorian calendars.
-* Slicing a :class:`~iris.cube.CubeList` now returns a
- :class:`~iris.cube.CubeList` instead of a normal list.
-
-Incompatible changes
---------------------
-* N/A
-
-Deprecations
-------------
-* The boolean methods/properties on the :class:`~iris.unit.Unit` class
- have been updated to `is_...()` methods, in line with the project's
- naming conventions.
-
- ====================================== ===========================================
- Deprecated property/method New method
- ====================================== ===========================================
- :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()`
- :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()`
- :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()`
- :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()`
- :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()`
- ====================================== ===========================================
-
.. _whats-new-abf:
Loading ABF/ABL files
-=====================
+---------------------
Support for the ABF and ABL file formats (as
`defined `_ by the
@@ -80,7 +57,7 @@ For example::
.. _whats-new-cf-profile:
Customised CF profiles
-======================
+----------------------
Iris now provides hooks in the CF-netCDF export process to allow
user-defined routines to check and/or modify the representation in the
@@ -89,10 +66,13 @@ netCDF file.
The following keys within the ``iris.site_configuration`` dictionary have
been **reserved** as hooks to *external* user-defined CF profile functions:
- * ``cf_profile`` injests a :class:`iris.cube.Cube` for analysis and returns a profile result
- * ``cf_patch`` modifies the CF-netCDF file associated with export of the :class:`iris.cube.Cube`
+ * ``cf_profile`` ingests a :class:`iris.cube.Cube` for analysis and returns a
+ profile result
+ * ``cf_patch`` modifies the CF-netCDF file associated with export of the
+ :class:`iris.cube.Cube`
-The ``iris.site_configuration`` dictionary should be configured via the ``iris/site_config.py`` file.
+The ``iris.site_configuration`` dictionary should be configured via the
+``iris/site_config.py`` file.
For further implementation details see ``iris/fileformats/netcdf.py``.
@@ -100,7 +80,7 @@ For further implementation details see ``iris/fileformats/netcdf.py``.
.. _whats-new-concat:
Cube concatenation
-==================
+------------------
Iris now provides initial support for concatenating Cubes along one or
more existing dimensions. Currently this will force the data to be
@@ -126,3 +106,33 @@ combine these into a single Cube as follows::
As this is an experimental feature, your feedback is especially welcome.
+Bugs fixed
+==========
+
+* Printing a Cube now supports Unicode attribute values.
+
+* PP export now sets LBMIN correctly.
+
+* Converting between reference times now works correctly for
+ units with non-Gregorian calendars.
+
+* Slicing a :class:`~iris.cube.CubeList` now returns a
+ :class:`~iris.cube.CubeList` instead of a normal list.
+
+
+Deprecations
+============
+
+* The boolean methods/properties on the :class:`~iris.unit.Unit` class
+ have been updated to `is_...()` methods, in line with the project's
+ naming conventions.
+
+ ====================================== ===========================================
+ Deprecated property/method New method
+ ====================================== ===========================================
+ :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()`
+ :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()`
+ :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()`
+ :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()`
+ :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()`
+ ====================================== ===========================================
diff --git a/docs/iris/src/whatsnew/1.4.rst b/docs/iris/src/whatsnew/1.4.rst
index 053a6e1096..7f96643f5f 100644
--- a/docs/iris/src/whatsnew/1.4.rst
+++ b/docs/iris/src/whatsnew/1.4.rst
@@ -1,96 +1,114 @@
-What's new in Iris 1.4
-**********************
+v1.4 (14 Jun 2013)
+******************
-:Release: 1.4.0
-:Date: 14 June 2013
-
-This document explains the new/changed features of Iris in version 1.4.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.4 features
-=================
-A summary of the main features added with version 1.4:
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
* Multiple cubes can now be exported to a NetCDF file.
+
* Correct nearest-neighbour calculation with circular coords.
+
* :ref:`Experimental regridding enhancements <exp-regrid>`.
+
* :ref:`Iris-Pandas interoperability <iris-pandas>`.
+
* NIMROD level type 12 (levels below ground) can now be loaded.
+
* :ref:`Load cubes from the internet via OPeNDAP <load-opendap>`.
+
* :ref:`GeoTiff export (experimental) <geotiff_export>`.
+
* :ref:`Cube merge update <cube-merge-update>`.
+
* :ref:`Unambiguous season year naming <season-year-name>`.
+
* NIMROD files with multiple fields and period of interest can now be loaded.
+
* Missing values are now handled when loading GRIB messages.
+
* PP export rule to calculate forecast period.
+
* :func:`~iris.cube.Cube.aggregated_by` now maintains array masking.
+
* IEEE 32bit fieldsfiles can now be loaded.
+
* NetCDF transverse mercator and climatology data can now be loaded.
+
* Polar stereographic GRIB data can now be loaded.
+
* :ref:`Cubes with no vertical coord can now be exported to GRIB <grib-novert>`.
+
* :ref:`Simplified resource configuration <simple_cfg>`.
+
* :ref:`Extended GRIB parameter translation <grib_params>`.
+
* Added an optimisation for single-valued coordinate constraints.
+
* :ref:`One dimensional linear interpolation fix <one-d-linear>`.
-* :ref:`Fix for iris.analysis.calculus.differentiate`.
-* Fixed pickling of cubes with 2D aux coords from NetCDF.
-* Fixed bug which ignored the "coords" keyword for certain plots.
-* Use the latest release of Cartopy, v0.8.0.
+* :ref:`Fix for iris.analysis.calculus.differentiate <calc-diff-fix>`.
-Incompatible changes
---------------------
-* As part of simplifying the mechanism for accessing test data,
- :func:`iris.io.select_data_path`, :data:`iris.config.DATA_REPOSITORY`,
- :data:`iris.config.MASTER_DATA_REPOSITORY` and
- :data:`iris.config.RESOURCE_DIR` have been removed.
+* Fixed pickling of cubes with 2D aux coords from NetCDF.
-Deprecations
-------------
-* The *add_custom_season_** functions from :mod:`~iris.coord_categorisation` have been deprecated in favour of adding their functionality to the *add_season_** functions
+* Fixed a bug which ignored the "coords" keyword for certain plots.
+* Use the latest release of Cartopy, v0.8.0.
.. _OPeNDAP: http://www.opendap.org/about
-
-
.. _exp-regrid:
Experimental regridding enhancements
-====================================
+------------------------------------
+
+Bilinear, area-weighted and area-conservative regridding functions are now
+available in :mod:`iris.experimental`. These functions support masked data and
+handle derived coordinates such as hybrid height. The final API is still in
+development.
-Bilinear, area-weighted and area-conservative regridding functions are now available in
-:mod:`iris.experimental`. These functions support masked data and handle
-derived coordinates such as hybrid height. The final API is still in development.
In the meantime:
+
Bilinear rectilinear regridding
-------------------------------
+
:func:`~iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid`
-can be used to regrid a cube onto a horizontal grid defined in a different coordinate system.
-The data values are calculated using bilinear interpolation.
+can be used to regrid a cube onto a horizontal grid defined in a different
+coordinate system. The data values are calculated using bilinear interpolation.
For example::
from iris.experimental.regrid import regrid_bilinear_rectilinear_src_and_grid
regridded_cube = regrid_bilinear_rectilinear_src_and_grid(source_cube, target_grid_cube)
+
Area-weighted regridding
------------------------
-:func:`~iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid` can be used to regrid a cube
-such that the data values of the resulting cube are calculated using the
-area-weighted mean.
+
+:func:`~iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`
+can be used to regrid a cube such that the data values of the resulting cube
+are calculated using the area-weighted mean.
For example::
from iris.experimental.regrid import regrid_area_weighted_rectilinear_src_and_grid as regrid_area_weighted
regridded_cube = regrid_area_weighted(source_cube, target_grid_cube)
+
Area-conservative regridding
----------------------------
+
:func:`~iris.experimental.regrid_conservative.regrid_conservative_via_esmpy`
-can be used for area-conservative regridding between geographical coordinate systems.
-This uses the ESMF library functions, via the ESMPy interface.
+can be used for area-conservative regridding between geographical coordinate
+systems. This uses the ESMF library functions, via the ESMPy interface.
For example::
@@ -100,19 +118,21 @@ For example::
.. _iris-pandas:
-Iris-Pandas interoperablilty
-============================
+Iris-Pandas interoperability
+----------------------------
+
Conversion to and from Pandas Series_ and DataFrames_ is now available.
See :mod:`iris.pandas` for more details.
-.. _Series: http://pandas.pydata.org/pandas-docs/stable/api.html#series
-.. _DataFrames: http://pandas.pydata.org/pandas-docs/stable/api.html#dataframe
+.. _Series: https://pandas.pydata.org/pandas-docs/stable/reference/series.html
+.. _DataFrames: https://pandas.pydata.org/pandas-docs/stable/reference/frame.html
.. _load-opendap:
Load cubes from the internet via OPeNDAP
-========================================
+----------------------------------------
+
Cubes can now be loaded directly from the internet, via OPeNDAP_.
For example::
@@ -123,8 +143,10 @@ For example::
.. _geotiff_export:
GeoTiff export
-==============
-With this experimental feature, two dimensional cubes can now be exported to GeoTiff files.
+--------------
+
+With this experimental feature, two dimensional cubes can now be exported to
+GeoTiff files.
For example::
@@ -139,17 +161,20 @@ For example::
.. _cube-merge-update:
Cube merge update
-=================
+-----------------
+
Cube merging now favours numerical coordinates over string coordinates
to describe a dimension, and :class:`~iris.coords.DimCoord` over
:class:`~iris.coords.AuxCoord`. These modifications prevent the error:
-*"No functional relationship between separable and inseparable candidate dimensions"*.
+*"No functional relationship between separable and inseparable candidate
+dimensions"*.
.. _season-year-name:
Unambiguous season year naming
-==============================
+------------------------------
+
The default names of categorisation coordinates are now less ambiguous.
For example, :func:`~iris.coord_categorisation.add_month_number` and
:func:`~iris.coord_categorisation.add_month_fullname` now create
@@ -159,15 +184,18 @@ For example, :func:`~iris.coord_categorisation.add_month_number` and
.. _grib-novert:
Cubes with no vertical coord can now be exported to GRIB
-========================================================
+--------------------------------------------------------
+
Iris can now export cubes with no vertical coord to GRIB.
-The solution is still under discussion: See https://github.com/SciTools/iris/issues/519.
+The solution is still under discussion: see
+https://github.com/SciTools/iris/issues/519.
.. _simple_cfg:
Simplified resource configuration
-=================================
+---------------------------------
+
A new configuration variable called :data:`iris.config.TEST_DATA_DIR`
has been added, replacing the previous combination of
:data:`iris.config.MASTER_DATA_REPOSITORY` and
@@ -180,7 +208,8 @@ be set by adding a ``test_data_dir`` entry to the ``Resources`` section of
.. _grib_params:
Extended GRIB parameter translation
-===================================
+-----------------------------------
+
- More GRIB2 params are recognised on input.
- Now translates some codes on GRIB2 output.
- Some GRIB2 params may load with a different standard_name.
@@ -190,16 +219,37 @@ Extended GRIB parameter translation
.. _one-d-linear:
One dimensional linear interpolation fix
-========================================
-:func:`~iris.analysis.interpolate.linear` can now extrapolate from a single point
-assuming a gradient of zero. This prevents an issue when loading cross sections
-with a hybrid height coordinate, on a staggered grid and only a single orography field.
+----------------------------------------
+
+:func:`~iris.analysis.interpolate.linear` can now extrapolate from a single
+point assuming a gradient of zero. This prevents an issue when loading cross
+sections with a hybrid height coordinate, on a staggered grid and only a single
+orography field.
.. _calc-diff-fix:
Fix for iris.analysis.calculus.differentiate
-=============================================
-A bug in :func:`~iris.analysis.calculus.differentiate` that had the potential to cause
-the loss of coordinate metadata when calculating the curl or the derivative of a cube has been fixed.
+--------------------------------------------
+
+A bug in :func:`~iris.analysis.calculus.differentiate` that had the potential
+to cause the loss of coordinate metadata when calculating the curl or the
+derivative of a cube has been fixed.
+
+
+Incompatible changes
+====================
+
+* As part of simplifying the mechanism for accessing test data,
+ :func:`iris.io.select_data_path`, :data:`iris.config.DATA_REPOSITORY`,
+ :data:`iris.config.MASTER_DATA_REPOSITORY` and
+ :data:`iris.config.RESOURCE_DIR` have been removed.
+
+Deprecations
+============
+
+* The *add_custom_season_** functions from :mod:`~iris.coord_categorisation`
+ have been deprecated in favour of adding their functionality to the
+ *add_season_** functions.
+
diff --git a/docs/iris/src/whatsnew/1.5.rst b/docs/iris/src/whatsnew/1.5.rst
index 7af1e40285..07f54e15cf 100644
--- a/docs/iris/src/whatsnew/1.5.rst
+++ b/docs/iris/src/whatsnew/1.5.rst
@@ -1,16 +1,21 @@
-What's new in Iris 1.5
-**********************
+v1.5 (13 Sep 2013)
+******************
-:Release: 1.5.0
-:Date: 12 September 2013
-
-This document explains the new/changed features of Iris in version 1.5.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.5 features
-=================
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
* Scatter plots can now be produced using :func:`iris.plot.scatter` and
:func:`iris.quickplot.scatter`.
+
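  For example (a sketch, assuming two matching one-dimensional cubes,
  ``cube_x`` and ``cube_y``)::

      import iris.quickplot as qplt

      qplt.scatter(cube_x, cube_y)
      qplt.show()
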
* The functions :func:`iris.plot.plot` and :func:`iris.quickplot.plot` now take
up to two arguments, which may be cubes or coordinates, allowing the user to
have full control over what is plotted on each axis. The coords keyword
@@ -25,7 +30,9 @@ Iris 1.5 features
* :class:`iris.analysis.SUM` is now a weighted aggregator, allowing it to take a
weights keyword argument.
+
* GRIB2 translations added for standard_name 'soil_temperature'.
+
* :meth:`iris.cube.Cube.slices` can now handle passing dimension index as well
as the currently supported types (string, coordinate), in order to slice in
cases where there is no coordinate associated with a dimension (a mix of
@@ -48,6 +55,7 @@ Iris 1.5 features
plt.show()
* Support for UM ancillary files truncated with the UM utility ieee
+
* Complete support for Transverse Mercator with saving to NetCDF also.
.. code-block:: python
@@ -70,18 +78,26 @@ Iris 1.5 features
.. image:: images/transverse_merc.png
* Support for loading NAME files (gridded and trajectory data).
+
* Multi-dimensional coordinate support added for
:func:`iris.analysis.cartography.cosine_latitude_weights`
+
* Added limited packaged GRIB support (bulletin headers).
+
* In-place keyword added to :func:`iris.analysis.maths.divide` and
:func:`iris.analysis.maths.multiply`.
+
* Performance gains for PP loading of the order of 40%.
+
* :mod:`iris.quickplot` now has a :func:`~iris.quickplot.show` function to
provide convenient access to matplotlib.pyplot.show().
+
* :meth:`iris.coords.DimCoord.from_regular` now implemented which creates a
:class:`~iris.coords.DimCoord` with regularly spaced points, and optionally
bounds.
+
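  For example (a sketch; note that ``zeroth`` is the value one step
  *before* the first point)::

      from iris.coords import DimCoord

      # 73 regularly spaced latitudes from -90 to 90, with bounds.
      latitude = DimCoord.from_regular(zeroth=-92.5, step=2.5, count=73,
                                       standard_name='latitude',
                                       units='degrees', with_bounds=True)
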
* Iris can now cope with a missing bounds variable from NetCDF files.
+
* Added support for bool array indexing on a cube.
.. code-block:: python
@@ -95,73 +111,95 @@ Iris 1.5 features
* Added support for loading fields defined on regular Gaussian grids from GRIB
files.
+
* :func:`iris.analysis.interpolate.extract_nearest_neighbour` now works
without needing to load the data (especially relevant to large datasets).
+
* When using plotting routines from :mod:`iris.plot` or :mod:`iris.quickplot`,
the direction of vertical axes will be reversed if the corresponding
coordinate has a "positive" attribute set to "down".
- see: :ref:`Oceanography-atlantic_profiles`
+ see: :ref:`sphx_glr_generated_gallery_oceanography_plot_atlantic_profiles.py`
* New PP stashcode translations added including 'dewpoint' and
'relative_humidity'.
+
* Added implied heights for several common PP STASH codes.
+
* GeoTIFF export capability enhanced for supporting various data types, coord
systems and mapping 0 to 360 longitudes to the -180 to 180 range.
Bugs fixed
-----------
+==========
+
* NetCDF error handling on save has been extended to capture file path and
permission errors.
+
* Shape of the Earth scale factors are now correctly interpreted by the GRIB
loader. They were previously used as a multiplier for the given value but
should have been used as a decimal shift.
+
* OSGB definition corrected.
+
* Transverse Mercator on load now accepts the following interchangeably due to
inconsistencies in CF documentation:
- * +scale_factor_at_central_meridian <-> scale_factor_at_projection_origin
- * +longitude_of_central_meridian <-> longitude_of_projection_origin
- (+recommended encoding)
+
+ * +scale_factor_at_central_meridian <-> scale_factor_at_projection_origin
+
+ * +longitude_of_central_meridian <-> longitude_of_projection_origin
+ (+recommended encoding)
+
* Ellipse description now maintained when converting GeogCS to cartopy.
+
* GeoTIFF export bug fixes.
+
* Polar axis now set to the North Pole, when a cube with no coordinate system
is saved to the PP file-format.
+
* :meth:`iris.coords.DimCoord.from_coord` and
:meth:`iris.coords.AuxCoord.from_coord` now correctly returns a copy of the
source coordinate's coordinate system.
+
* Units part of the axis label is now omitted when the coordinate it represents
is given as a time reference (:mod:`iris.quickplot`).
+
* CF dimension coordinate is now maintained in the resulting cube when a cube
with CF dimension coordinate is being aggregated over.
+
* Units for Lambert conformal and polar stereographic coordinates now defined as
meters.
+
* Various fieldsfile load bugs including failing to read the coordinates from
the file have been fixed.
+
* Coding of maximum and minimum time-stats in GRIB2 saving has been fixed.
-* Example code in section 4.1 of the userguide updated so it uses a sample
+
+* Example code in section 4.1 of the user guide updated so it uses a sample
data file that exists.
+
* Zorder of contour lines drawn by :func:`~iris.plot.contourf` has been changed
to address issue of objects appearing in-between line and filled contours.
+
* Coord comparisons now function correctly when comparing to numpy scalars.
+
* Cube loading constraints and :meth:`iris.cube.Cube.extract` correctly
implement cell equality methods.
-Incompatible changes
---------------------
-* N/A
-
Deprecations
-------------
+============
+
* The coords keyword argument for :func:`iris.plot.plot` and
:func:`iris.quickplot.plot` has been deprecated due to the new API which
accepts multiple cubes or coordinates.
+
* :meth:`iris.fileformats.pp.PPField.regular_points` and
:meth:`iris.fileformats.pp.PPField.regular_bounds` have now been deprecated
in favour of a new factory method
:meth:`iris.coords.DimCoord.from_regular()`.
+
* :func:`iris.fileformats.pp.add_load_rules` and
:func:`iris.fileformats.grib.add_load_rules` are now deprecated.
diff --git a/docs/iris/src/whatsnew/1.6.rst b/docs/iris/src/whatsnew/1.6.rst
index 4b540c6cc9..068311db5f 100644
--- a/docs/iris/src/whatsnew/1.6.rst
+++ b/docs/iris/src/whatsnew/1.6.rst
@@ -1,14 +1,17 @@
-What's new in Iris 1.6
-**********************
+v1.6 (26 Jan 2014)
+******************
-:Release: 1.6.1
-:Date: 18th February 2014
-
-This document explains the new/changed features of Iris in version 1.6.
+This document explains the changes made to Iris for this release
(:doc:`View all changes <index>`.)
-Iris 1.6 features
-=================
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
.. _showcase:
@@ -29,9 +32,9 @@ Iris 1.6 features
>>> print([str(cell) for cell in coord.cells()])
['1970-01-01 01:00:00', '1970-01-01 02:00:00', '1970-01-01 03:00:00']
- Note that, either a :class:`datetime.datetime` or :class:`netcdftime.datetime`
- object instance will be returned, depending on the calendar of the time
- reference coordinate.
+ Note that either a :class:`datetime.datetime` or
+ :class:`netcdftime.datetime` object instance will be returned, depending on
+ the calendar of the time reference coordinate.
This capability permits the ability to express time constraints more
naturally when the cell represents a *datetime-like* object.
@@ -41,8 +44,10 @@ Iris 1.6 features
# Ignore the 1st of January.
iris.Constraint(time=lambda cell: cell.point.month != 1 and cell.point.day != 1)
- Note that, :class:`iris.Future` also supports a `context manager `_
- which allows multiple sections of code to execute with different run-time behaviour.
+ Note that :class:`iris.Future` also supports a
+ `context manager `_
+ which allows multiple sections of code to execute with different run-time
+ behaviour.
.. code-block:: python
@@ -63,12 +68,12 @@ Iris 1.6 features
:class:`datetime.datetime` or :class:`netcdftime.datetime`.
The *year, month, day, hour, minute, second* and *microsecond* attributes of
- a :class:`iris.time.PartialDateTime` object may be fully or partially specified
- for any given comparison.
+ a :class:`iris.time.PartialDateTime` object may be fully or partially
+ specified for any given comparison.
This is particularly useful for time based constraints, whilst enabling the
- :data:`iris.FUTURE.cell_datetime_objects`, see :ref:`here ` for further
- details on this new release feature.
+ :data:`iris.FUTURE.cell_datetime_objects`; see :ref:`here <showcase>` for
+ further details on this new release feature.
.. code-block:: python
@@ -85,139 +90,64 @@ Iris 1.6 features
* GRIB loading supports latitude/longitude or Gaussian reduced grids for
version 1 and version 2.
+
* :ref:`A new utility function to assist with caching`.
+
* :ref:`The RMS aggregator supports weights`.
+
* :ref:`A new experimental function to equalise cube attributes`.
+
* :ref:`Collapsing a cube provides a tolerance level for missing-data`.
+
* NAME loading supports vertical coordinates.
+
* UM land/sea mask de-compression for Fieldsfiles and PP files.
+
* Lateral boundary condition Fieldsfile support.
+
* Staggered grid support for Fieldsfiles extended to type 6 (Arakawa C grid
with v at poles).
+
* Extend support for Fieldsfiles with grid codes 11, 26, 27, 28 and 29.
+
* :ref:`Promoting a scalar coordinate to new leading cube dimension`.
+
* Interpreting cell methods from NAME.
+
* GRIB2 export without forecast_period, enabling NAME to GRIB2.
+
* Loading height levels from GRIB2.
+
* :func:`iris.coord_categorisation.add_categorised_coord` now supports
multi-dimensional coordinate categorisation.
-* Fieldsfiles and PP support for loading and saving of air potential temperature.
+
+* Fieldsfiles and PP support for loading and saving of air potential
+ temperature.
+
* :func:`iris.experimental.regrid.regrid_weighted_curvilinear_to_rectilinear`
regrids curvilinear point data to a target rectilinear grid using associated
area weights.
-* Extended capability of the NetCDF saver :meth:`iris.fileformats.netcdf.Saver.write`
- for fine-tune control of a :mod:`netCDF4.Variable`. Also allows multiple dimensions
- to be nominated as *unlimited*.
-* :ref:`A new PEAK aggregator providing spline interpolation`.
-* A new utility function :func:`iris.util.broadcast_to_shape`.
-* A new utility function :func:`iris.util.as_compatible_shape`.
-* Iris tests can now be run on systems where directory write permissions
- previously did not allow it. This is achieved by writing to the current working
- directory in such cases.
-* Support for 365 day calendar PP fields.
-* Added phenomenon translation between cf and grib2 for wind (from) direction.
-* PP files now retain lbfc value on save, derived from the stash attribute.
-Bugs fixed
-==========
-* :meth:`iris.cube.Cube.rolling_window` has been extended to support masked arrays.
-* :meth:`iris.cube.Cube.collapsed` now handles string coordinates.
-* Default LBUSER(2) to -99 for Fieldsfile and PP saving.
-* :func:`iris.util.monotonic` returns the correct direction.
-* File loaders correctly parse filenames containing colons.
-* ABF loader now correctly loads the ABF data payload once.
-* Support for 1D array :data:`iris.cube.cube.attributes`.
-* GRIB bounded level saving fix.
-* :func:`iris.analysis.cartography.project` now associates a coordinate system
- with the resulting target cube, where applicable.
-* :func:`iris.util.array_equal` now correctly ignores any mask if present,
- matching the behaviour of :func:`numpy.array_equal` except with string array
- support.
-* :func:`iris.analysis.interpolate.linear` now retains a mask in the resulting
- cube.
-* :meth:`iris.coords.DimCoord.from_regular` now correctly returns a coordinate
- which will always be regular as indicated by :func:`~iris.util.is_regular`.
-* :func:`iris.util.rolling_window` handling of masked arrays (degenerate
- masks) fixed.
-* Exception no longer raised for any ellipsoid definition in nimrod loading.
+* Extended capability of the NetCDF saver
+  :meth:`iris.fileformats.netcdf.Saver.write` for fine-grained control of a
+ :mod:`netCDF4.Variable`. Also allows multiple dimensions to be nominated as
+ *unlimited*.
-Incompatible changes
-====================
-* The experimental 'concatenate' function is now a method of a
- :class:`iris.cube.CubeList`, see :meth:`iris.cube.CubeList.concatenate`. The
- functionality is unchanged.
-* :meth:`iris.cube.Cube.extract_by_trajectory()` has been removed.
- Instead, use :func:`iris.analysis.trajectory.interpolate()`.
-* :func:`iris.load_strict()` has been removed.
- Instead, use :func:`iris.load_cube()` and :func:`iris.load_cubes()`.
-* :meth:`iris.coords.Coord.cos()` and :meth:`iris.coords.Coord.sin()`
- have been removed.
-* :meth:`iris.coords.Coord.unit_converted()` has been removed.
- Instead, make a copy of the coordinate using
- :meth:`iris.coords.Coord.copy()` and then call the
- :meth:`iris.coords.Coord.convert_units()` method of the new
- coordinate.
-* Iteration over a :class:`~iris.cube.Cube` has been removed. Instead,
- use :meth:`iris.cube.Cube.slices()`.
-* The following :class:`~iris.unit.Unit` deprecated methods/properties have been removed.
-
- ====================================== ===========================================
- Removed property/method New method
- ====================================== ===========================================
- :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()`
- :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()`
- :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()`
- :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()`
- :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()`
- ====================================== ===========================================
-* As a result of deprecating :meth:`iris.cube.Cube.add_history` and removing the
- automatic appending of history by operations such as cube arithmetic,
- collapsing, and aggregating, the signatures of a number of functions within
- :mod:`iris.analysis.maths` have been modified along with that of
- :class:`iris.analysis.Aggregator` and :class:`iris.analysis.WeightedAggregator`.
-* The experimental ABF and ABL functionality has now been promoted to
- core functionality in :mod:`iris.fileformats.abf`.
-* The following :mod:`iris.coord_categorisation` deprecated functions have been
- removed.
+* :ref:`A new PEAK aggregator providing spline interpolation`.
- =============================================================== =======================================================
- Removed function New function
- =============================================================== =======================================================
- :func:`~iris.coord_categorisation.add_custom_season` :func:`~iris.coord_categorisation.add_season`
- :func:`~iris.coord_categorisation.add_custom_season_number` :func:`~iris.coord_categorisation.add_season_number`
- :func:`~iris.coord_categorisation.add_custom_season_year` :func:`~iris.coord_categorisation.add_season_year`
- :func:`~iris.coord_categorisation.add_custom_season_membership` :func:`~iris.coord_categorisation.add_season_membership`
- :func:`~iris.coord_categorisation.add_month_shortname` :func:`~iris.coord_categorisation.add_month`
- :func:`~iris.coord_categorisation.add_weekday_shortname` :func:`~iris.coord_categorisation.add_weekday`
- :func:`~iris.coord_categorisation.add_season_month_initials` :func:`~iris.coord_categorisation.add_season`
- =============================================================== =======================================================
-* When a cube is loaded from PP or GRIB and it has both time and forecast period
- coordinates, and the time coordinate has bounds, the forecast period coordinate
- will now also have bounds. These bounds will be aligned with the bounds of the
- time coordinate taking into account the forecast reference time. Also,
- the forecast period point will now be aligned with the time point.
+* A new utility function :func:`iris.util.broadcast_to_shape`.
-Deprecations
-============
-* :meth:`iris.cube.Cube.add_history` has been deprecated in favour
- of users modifying/creating the history metadata directly. This is
- because the automatic behaviour did not deliver a sufficiently complete,
- auditable history and often prevented the merging of cubes.
-* :func:`iris.util.broadcast_weights` has been deprecated and replaced
- by the new utility function :func:`iris.util.broadcast_to_shape`.
-* Callback mechanism `iris.run_callback` has had its deprecation of return
- values revoked. The callback can now return cube instances as well as
- inplace changes to the cube.
+* A new utility function :func:`iris.util.as_compatible_shape`.
-New Contributors
-================
-Congratulations and thank you to `felicityguest `_, `jkettleb `_,
-`kwilliams-mo `_ and `shoyer `_ who all made their first contribution
-to Iris!
+* Iris tests can now be run on systems where directory write permissions
+ previously did not allow it. This is achieved by writing to the current
+ working directory in such cases.
+* Support for 365-day calendar PP fields.
-----
+* Added phenomenon translation between CF and GRIB2 for wind (from) direction.
+* PP files now retain the ``lbfc`` value on save, derived from the ``stash``
+  attribute.
.. _caching:
@@ -249,7 +179,8 @@ consuming processing, or to reap the benefit of fast-loading a pickled cube.
.. _rms:
The RMS aggregator supports weights
-===================================
+-----------------------------------
+
The :data:`iris.analysis.RMS` aggregator has been extended to allow the use of
weights using the new keyword argument :data:`weights`.
@@ -264,7 +195,8 @@ For example, an RMS weighted cube collapse is performed as follows:
.. _equalise:
Equalise cube attributes
-========================
+------------------------
+
To assist with :class:`iris.cube.Cube` merging, the new experimental in-place
function :func:`iris.experimental.equalise_cubes.equalise_attributes` ensures
that a sequence of cubes contains a common set of :data:`iris.cube.Cube.attributes`.
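+
+A minimal sketch (assuming ``cubes`` is an :class:`iris.cube.CubeList` whose
+members differ only in their attributes):
+
+.. code-block:: python
+
+    from iris.experimental.equalise_cubes import equalise_attributes
+
+    # Remove, in place, any attributes that are not common to every cube,
+    # so that the cubes can then be merged.
+    equalise_attributes(cubes)
+    merged = cubes.merge()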
@@ -276,7 +208,8 @@ have the same attributes.
.. _tolerance:
Masking a collapsed result by missing-data tolerance
-====================================================
+----------------------------------------------------
+
The result from collapsing masked cube data may now be completely
masked by providing a :data:`mdtol` missing-data tolerance keyword
to :meth:`iris.cube.Cube.collapsed`.
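+
+For example (a sketch; ``cube`` is assumed to hold masked data and have a
+'longitude' coordinate):
+
+.. code-block:: python
+
+    import iris.analysis
+
+    # Mask each result cell where more than half of the contributing
+    # input cells are themselves missing.
+    mean = cube.collapsed('longitude', iris.analysis.MEAN, mdtol=0.5)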
@@ -289,7 +222,8 @@ less than or equal to the provided tolerance.
.. _promote:
Promote a scalar coordinate
-===========================
+---------------------------
+
The new utility function :func:`iris.util.new_axis` creates a new cube with
a new leading dimension of size unity. If a scalar coordinate is provided, then
the scalar coordinate is promoted to be the dimension coordinate for the new
@@ -301,7 +235,8 @@ Note that, this function will load the data payload of the cube.
.. _peak:
A new PEAK aggregator providing spline interpolation
-====================================================
+----------------------------------------------------
+
The new :data:`iris.analysis.PEAK` aggregator calculates the global peak
value from a spline interpolation of the :class:`iris.cube.Cube` data payload
along a nominated coordinate axis.
@@ -312,3 +247,138 @@ For example, to calculate the peak time:
from iris.analysis import PEAK
collapsed_cube = cube.collapsed('time', PEAK)
+
+
+Bugs fixed
+==========
+
+* :meth:`iris.cube.Cube.rolling_window` has been extended to support masked
+ arrays.
+
+* :meth:`iris.cube.Cube.collapsed` now handles string coordinates.
+
+* Default LBUSER(2) to -99 for Fieldsfile and PP saving.
+
+* :func:`iris.util.monotonic` returns the correct direction.
+
+* File loaders correctly parse filenames containing colons.
+
+* ABF loader now correctly loads the ABF data payload once.
+
+* Support for 1D array :data:`iris.cube.Cube.attributes`.
+
+* GRIB bounded level saving fix.
+
+* :func:`iris.analysis.cartography.project` now associates a coordinate system
+ with the resulting target cube, where applicable.
+
+* :func:`iris.util.array_equal` now correctly ignores any mask if present,
+ matching the behaviour of :func:`numpy.array_equal` except with string array
+ support.
+
+* :func:`iris.analysis.interpolate.linear` now retains a mask in the resulting
+ cube.
+
+* :meth:`iris.coords.DimCoord.from_regular` now correctly returns a coordinate
+ which will always be regular as indicated by :func:`~iris.util.is_regular`.
+
+* :func:`iris.util.rolling_window` handling of masked arrays (degenerate
+ masks) fixed.
+
+* Exception no longer raised for any ellipsoid definition in nimrod loading.
+
+
+Incompatible changes
+====================
+
+* The experimental 'concatenate' function is now a method of a
+ :class:`iris.cube.CubeList`, see :meth:`iris.cube.CubeList.concatenate`. The
+ functionality is unchanged.
+
+* :meth:`iris.cube.Cube.extract_by_trajectory()` has been removed.
+ Instead, use :func:`iris.analysis.trajectory.interpolate()`.
+
+* :func:`iris.load_strict()` has been removed.
+ Instead, use :func:`iris.load_cube()` and :func:`iris.load_cubes()`.
+
+* :meth:`iris.coords.Coord.cos()` and :meth:`iris.coords.Coord.sin()`
+ have been removed.
+
+* :meth:`iris.coords.Coord.unit_converted()` has been removed.
+ Instead, make a copy of the coordinate using
+ :meth:`iris.coords.Coord.copy()` and then call the
+ :meth:`iris.coords.Coord.convert_units()` method of the new
+ coordinate.
+
+* Iteration over a :class:`~iris.cube.Cube` has been removed. Instead,
+ use :meth:`iris.cube.Cube.slices()`.
+
+* The following :class:`~iris.unit.Unit` deprecated methods/properties have
+ been removed.
+
+ ====================================== ===========================================
+ Removed property/method New method
+ ====================================== ===========================================
+ :meth:`~iris.unit.Unit.convertible()` :meth:`~iris.unit.Unit.is_convertible()`
+ :attr:`~iris.unit.Unit.dimensionless` :meth:`~iris.unit.Unit.is_dimensionless()`
+ :attr:`~iris.unit.Unit.no_unit` :meth:`~iris.unit.Unit.is_no_unit()`
+ :attr:`~iris.unit.Unit.time_reference` :meth:`~iris.unit.Unit.is_time_reference()`
+ :attr:`~iris.unit.Unit.unknown` :meth:`~iris.unit.Unit.is_unknown()`
+ ====================================== ===========================================
+
+* As a result of deprecating :meth:`iris.cube.Cube.add_history` and removing the
+ automatic appending of history by operations such as cube arithmetic,
+ collapsing, and aggregating, the signatures of a number of functions within
+ :mod:`iris.analysis.maths` have been modified along with that of
+ :class:`iris.analysis.Aggregator` and
+ :class:`iris.analysis.WeightedAggregator`.
+
+* The experimental ABF and ABL functionality has now been promoted to
+ core functionality in :mod:`iris.fileformats.abf`.
+
+* The following :mod:`iris.coord_categorisation` deprecated functions have been
+ removed.
+
+ =============================================================== =======================================================
+ Removed function New function
+ =============================================================== =======================================================
+ :func:`~iris.coord_categorisation.add_custom_season` :func:`~iris.coord_categorisation.add_season`
+ :func:`~iris.coord_categorisation.add_custom_season_number` :func:`~iris.coord_categorisation.add_season_number`
+ :func:`~iris.coord_categorisation.add_custom_season_year` :func:`~iris.coord_categorisation.add_season_year`
+ :func:`~iris.coord_categorisation.add_custom_season_membership` :func:`~iris.coord_categorisation.add_season_membership`
+ :func:`~iris.coord_categorisation.add_month_shortname` :func:`~iris.coord_categorisation.add_month`
+ :func:`~iris.coord_categorisation.add_weekday_shortname` :func:`~iris.coord_categorisation.add_weekday`
+ :func:`~iris.coord_categorisation.add_season_month_initials` :func:`~iris.coord_categorisation.add_season`
+ =============================================================== =======================================================
+
+* When a cube is loaded from PP or GRIB and it has both time and forecast period
+ coordinates, and the time coordinate has bounds, the forecast period
+ coordinate will now also have bounds. These bounds will be aligned with the
+ bounds of the time coordinate taking into account the forecast reference
+ time. Also, the forecast period point will now be aligned with the time point.
+
+
+Deprecations
+============
+
+* :meth:`iris.cube.Cube.add_history` has been deprecated in favour
+ of users modifying/creating the history metadata directly. This is
+ because the automatic behaviour did not deliver a sufficiently complete,
+ auditable history and often prevented the merging of cubes.
+
+* :func:`iris.util.broadcast_weights` has been deprecated and replaced
+ by the new utility function :func:`iris.util.broadcast_to_shape`.
+
+* Callback mechanism `iris.run_callback` has had its deprecation of return
+  values revoked. The callback can now return cube instances as well as make
+  in-place changes to the cube.
+
+
+New Contributors
+================
+Congratulations and thank you to
+`felicityguest `_,
+`jkettleb `_,
+`kwilliams-mo `_ and
+`shoyer `_ who all made their first contribution
+to Iris!
diff --git a/docs/iris/src/whatsnew/1.7.rst b/docs/iris/src/whatsnew/1.7.rst
index 2f3a52fbb9..e60c1083d9 100644
--- a/docs/iris/src/whatsnew/1.7.rst
+++ b/docs/iris/src/whatsnew/1.7.rst
@@ -1,22 +1,26 @@
-What's new in Iris 1.7
-**********************
+v1.7 (04 Jul 2014)
+******************
-This document explains the new/changed features of Iris in version 1.7.
+This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
-:Release: 1.7.4
-:Date: 15th April 2015
-Iris 1.7 features
-=================
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
.. _showcase:
.. admonition:: Showcase: Iris is making use of Biggus
- Iris is now making extensive use of `Biggus `_
- for virtual arrays and lazy array evaluation. In practice this means that analyses
- of cubes with data bigger than the available system memory are now possible.
+ Iris is now making extensive use of
+ `Biggus `_ for virtual arrays and lazy
+ array evaluation. In practice this means that analyses of cubes with data
+ bigger than the available system memory are now possible.
Other than the improved functionality the changes are mostly
transparent; for example, before the introduction of biggus, MemoryErrors
@@ -33,20 +37,20 @@ Iris 1.7 features
>>> print(type(result))
- Memory is still a limiting factor if ever the data is desired as a NumPy array
- (e.g. via :data:`cube.data `), but additional methods have
- been added to the Cube to support querying and subsequently accessing the "lazy"
- data form (see :meth:`~iris.cube.Cube.has_lazy_data` and
- :meth:`~iris.cube.Cube.lazy_data`).
+ Memory is still a limiting factor if ever the data is desired as a NumPy
+ array (e.g. via :data:`cube.data `), but additional
+ methods have been added to the Cube to support querying and subsequently
+ accessing the "lazy" data form (see :meth:`~iris.cube.Cube.has_lazy_data`
+ and :meth:`~iris.cube.Cube.lazy_data`).
.. admonition:: Showcase: New interpolation and regridding API
- New interpolation and regridding interfaces have been added which simplify and
- extend the existing functionality.
+ New interpolation and regridding interfaces have been added which simplify
+ and extend the existing functionality.
The interfaces are exposed on the cube in the form of the
- :meth:`~iris.cube.Cube.interpolate` and :meth:`~iris.cube.Cube.regrid` methods.
- Conceptually the signatures of the methods are::
+ :meth:`~iris.cube.Cube.interpolate` and :meth:`~iris.cube.Cube.regrid`
+ methods. Conceptually the signatures of the methods are::
interpolated_cube = cube.interpolate(interpolation_points, interpolation_scheme)
@@ -55,16 +59,17 @@ Iris 1.7 features
regridded_cube = cube.regrid(target_grid_cube, regridding_scheme)
Whilst not all schemes have been migrated to the new interface,
- :class:`iris.analysis.Linear` defines both linear interpolation and regridding,
- and :class:`iris.analysis.AreaWeighted` defines an area weighted regridding
- scheme.
+ :class:`iris.analysis.Linear` defines both linear interpolation and
+ regridding, and :class:`iris.analysis.AreaWeighted` defines an area weighted
+ regridding scheme.
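+
+  For instance (a sketch; ``target_grid_cube`` is assumed to be a cube
+  defining the destination grid):
+
+  .. code-block:: python
+
+      import iris.analysis
+
+      # Linear interpolation at two arbitrary latitudes.
+      interpolated_cube = cube.interpolate([('latitude', [30.0, 40.0])],
+                                           iris.analysis.Linear())
+
+      # Area-weighted regridding onto the grid of another cube.
+      regridded_cube = cube.regrid(target_grid_cube,
+                                   iris.analysis.AreaWeighted())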
.. admonition:: Showcase: Merge and concatenate reporting
Merge reporting is designed as an aid to the merge processes. Should merging
- a :class:`~iris.cube.CubeList` fail, merge reporting means that a descriptive
- error will be raised that details the differences between the cubes in the
- :class:`~iris.cube.CubeList` that prevented the merge from being successful.
+ a :class:`~iris.cube.CubeList` fail, merge reporting means that a
+ descriptive error will be raised that details the differences between the
+ cubes in the :class:`~iris.cube.CubeList` that prevented the merge from
+ being successful.
A new :class:`~iris.cube.CubeList` method, called
:meth:`~iris.cube.CubeList.merge_cube`, has been introduced. Calling it on a
@@ -83,8 +88,8 @@ Iris 1.7 features
iris.exceptions.MergeError: failed to merge into a single cube.
cube.attributes keys differ: 'foo'
- The naming of this new method mirrors that of Iris load functions, where
- one would always expect a :class:`~iris.cube.CubeList` from :func:`iris.load`
+ The naming of this new method mirrors that of Iris load functions, where one
+ would always expect a :class:`~iris.cube.CubeList` from :func:`iris.load`
and a :class:`~iris.cube.Cube` from :func:`iris.load_cube`.
Concatenate reporting is the equivalent process for concatenating a
@@ -101,10 +106,10 @@ Iris 1.7 features
However, the additional richness of Iris coordinate meta-data provides an
enhanced capability beyond the basic broadcasting behaviour of NumPy.
- This means that when performing cube arithmetic, the dimensionality and shape of
- cubes no longer need to match. For example, if the dimensionality of a cube is
- reduced by collapsing, then the result can be used to subtract from the original
- cube to calculate an anomaly::
+ This means that when performing cube arithmetic, the dimensionality and
+ shape of cubes no longer need to match. For example, if the dimensionality
+ of a cube is reduced by collapsing, then the result can be used to subtract
+ from the original cube to calculate an anomaly::
>>> time_mean = original_cube.collapsed('time', iris.analysis.MEAN)
>>> mean_anomaly = original_cube - time_mean
@@ -117,132 +122,218 @@ Iris 1.7 features
>>> zero_cube = original_cube - similar_cube
* Merge reporting that raises a descriptive error if the merge process fails.
-* Linear interpolation and regridding now make use of SciPy's RegularGridInterpolator
- for much faster linear interpolation.
+
+* Linear interpolation and regridding now make use of SciPy's
+ RegularGridInterpolator for much faster linear interpolation.
+
* NAME file loading now handles the "no time averaging" column and translates
- height/altitude above ground/sea-level columns into appropriate coordinate metadata.
-* The NetCDF saver has been extended to allow saving of cubes with hybrid pressure
- auxiliary factories.
-* PP/FF loading supports LBLEV of 9999.
-* Extended GRIB1 loading to support data on hybrid pressure levels.
-* :func:`iris.coord_categorisation.add_day_of_year` can be used to add categorised
- day of year coordinates based on time coordinates with non-Gregorian calendars.
+ height/altitude above ground/sea-level columns into appropriate coordinate
+ metadata.
+
+* The NetCDF saver has been extended to allow saving of cubes with hybrid
+ pressure auxiliary factories.
+
+* PP/FF loading supports LBLEV of 9999.
+
+* Extended GRIB1 loading to support data on hybrid pressure levels.
+
+* :func:`iris.coord_categorisation.add_day_of_year` can be used to add
+ categorised day of year coordinates based on time coordinates with
+ non-Gregorian calendars.
+
* Support for loading data on reduced grids from GRIB files in raw form without
automatically interpolating to a regular grid.
+
* The coordinate systems :class:`iris.coord_systems.Orthographic` and
- :class:`iris.coord_systems.VerticalPerspective` (for imagery from geostationary
- satellites) have been added.
-* Extended NetCDF loading to support the "ocean sigma over z" auxiliary coordinate
+ :class:`iris.coord_systems.VerticalPerspective` (for imagery from
+ geostationary satellites) have been added.
+
+* Extended NetCDF loading to support the "ocean sigma over z" auxiliary
+ coordinate
factory.
+
* Support added for loading CF-NetCDF data with bounds arrays that are missing a
vertex dimension.
+
* :meth:`iris.cube.Cube.rolling_window` can now be used with string-based
:class:`iris.coords.AuxCoord` instances.
+
* Loading of PP and FF files has been optimised through deferring creation of
PPField attributes.
+
* Automatic association of a coordinate's CF formula terms variable with the
data variable associated with that coordinate.
-* PP loading translates cross-section height into a dimensional auxiliary coordinate.
-* String auxiliary coordinates can now be plotted with the Iris plotting wrappers.
-* :func:`iris.analysis.geometry.geometry_area_weights` now allows for the calculation of
- normalized cell weights.
-* Many new translations between the CF spec and STASH codes or GRIB2 parameter codes.
-* PP save rules add the data's UM Version to the attributes of the saved file
- when appropriate.
+
+* PP loading translates cross-section height into a dimensional auxiliary
+ coordinate.
+
+* String auxiliary coordinates can now be plotted with the Iris
+ plotting wrappers.
+
+* :func:`iris.analysis.geometry.geometry_area_weights` now
+ allows for the calculation of normalized cell weights.
+
+* Many new translations between the CF spec and STASH codes or GRIB2 parameter
+ codes.
+
+* PP save rules add the data's UM Version to the attributes of the saved
+ file when appropriate.
+
* NetCDF reference surface variable promotion available through the
:class:`iris.FUTURE` mechanism.
-* A speed improvement in calculation of :func:`iris.analysis.geometry.geometry_area_weights`.
-* The mdtol keyword was added to area-weighted regridding to allow control of the
- tolerance for missing data. For a further description of this concept, see
+
+* A speed improvement in calculation of
+ :func:`iris.analysis.geometry.geometry_area_weights`.
+
+* The mdtol keyword was added to area-weighted regridding to allow control of
+ the tolerance for missing data. For a further description of this concept, see
:class:`iris.analysis.AreaWeighted`.
+
* Handling for patching of the CF conventions global attribute via a defined
cf_patch_conventions function.
-* Deferred GRIB data loading has been introduced for reduced memory consumption when
- loading GRIB files.
+
+* Deferred GRIB data loading has been introduced for reduced memory consumption
+ when loading GRIB files.
+
* Concatenate reporting that raises a descriptive error if the concatenation
process fails.
+
* A speed improvement when loading PP or FF data and constraining on STASH code.
+
Bugs fixed
==========
+
* Data containing more than one reference cube for constructing hybrid height
coordinates can now be loaded.
+
* Removed cause of increased margin of error when interpolating.
+
* Changed floating-point precision used when wrapping points for interpolation.
+
* Mappables that can be used to generate colorbars are now returned by Iris
plotting wrappers.
-* NetCDF load ignores over-specified formula terms on bounded dimensionless vertical
- coordinates.
+
+* NetCDF load ignores over-specified formula terms on bounded dimensionless
+ vertical coordinates.
+
* Auxiliary coordinate factory loading now correctly interprets formula term
- varibles for "atmosphere hybrid sigma pressure" coordinate data.
+ variables for "atmosphere hybrid sigma pressure" coordinate data.
+
* Corrected comparison of NumPy NaN values in cube merge process.
-* Fixes for :meth:`iris.cube.Cube.intersection` to correct calculating the intersection
- of a cube with split bounds, handling of circular coordinates, handling of
- monotonically descending bounded coordinats and for finding a wrapped two-point
- result and longitude tolerances.
-* A bug affecting :meth:`iris.cube.Cube.extract` and :meth:`iris.cube.CubeList.extract`
- that led to unexpected behaviour when operating on scalar cubes has been fixed.
-* Aggregate_by may now be passed single-value coordinates.
-* Making a copy of a :class:`iris.coords.DimCoord` no longer results in the writeable
- flag on the copied points and bounds arrays being set to True.
-* Can now save to PP a cube that has vertical levels but no orography.
+
+* Fixes for :meth:`iris.cube.Cube.intersection` to correct calculating the
+ intersection of a cube with split bounds, handling of circular coordinates,
+ handling of monotonically descending bounded coordinates and for finding a
+ wrapped two-point result and longitude tolerances.
+
+* A bug affecting :meth:`iris.cube.Cube.extract` and
+ :meth:`iris.cube.CubeList.extract` that led to unexpected behaviour when
+ operating on scalar cubes has been fixed.
+
+* :meth:`~iris.cube.Cube.aggregated_by` may now be passed single-value
+  coordinates.
+
+* Making a copy of a :class:`iris.coords.DimCoord` no longer results in the
+ writeable flag on the copied points and bounds arrays being set to True.
+
+* Can now save to PP a cube that has vertical levels but no orography.
+
* Fix a bug causing surface altitude and surface pressure fields to not appear
in cubes loaded with a STASH constraint.
-* Fixed support for :class:`iris.fileformats.pp.STASH` objects in STASH constraints.
-* A fix to avoid a problem where cube attribute names clash with NetCDF reserved attribute names.
-* A fix to allow :meth:`iris.cube.CubeList.concatenate` to deal with descending coordinate order.
-* Add missing NetCDF attribute `varname` when constructing a new :class:`iris.coords.AuxCoord`.
-* The datatype of time arrays converted with :func:`iris.util.unify_time_units` is now preserved.
-Bugs fixed in v1.7.3
+* Fixed support for :class:`iris.fileformats.pp.STASH` objects in STASH
+ constraints.
+
+* A fix to avoid a problem where cube attribute names clash with
+ NetCDF reserved attribute names.
+
+* A fix to allow :meth:`iris.cube.CubeList.concatenate` to deal with descending
+ coordinate order.
+
+* Add missing NetCDF attribute `varname` when constructing a new
+  :class:`iris.coords.AuxCoord`.
+
+* The datatype of time arrays converted with
+  :func:`iris.util.unify_time_units` is now preserved.
+
+
+v1.7.3 (16 Dec 2014)
^^^^^^^^^^^^^^^^^^^^
-* Scalar dimension coordinates can now be concatenated with :meth:`iris.cube.CubeList.concatenate`.
-* Arbitrary names can no longer be set for elements of a :class:`iris.fileformats.pp.SplittableInt`.
-* Cubes that contain a pseudo-level coordinate can now be saved to PP.
-* Fixed a bug in the FieldsFile loader that prevented it always loading all available fields.
-Bugs fixed in v1.7.4
+* Scalar dimension coordinates can now be concatenated with
+ :meth:`iris.cube.CubeList.concatenate`.
+
+* Arbitrary names can no longer be set
+ for elements of a :class:`iris.fileformats.pp.SplittableInt`.
+
+* Cubes that contain a pseudo-level coordinate can now be saved to PP.
+
+* Fixed a bug in the FieldsFile loader that prevented it always loading all
+ available fields.
+
+
+v1.7.4 (15 Apr 2015)
^^^^^^^^^^^^^^^^^^^^
+
* :meth:`Coord.guess_bounds` can now deal with circular coordinates.
+
* :meth:`Coord.nearest_neighbour_index` can now work with descending bounds.
+
* Passing `weights` to :meth:`Cube.rolling_window` no longer prevents other
keyword arguments from being passed to the aggregator.
+
* Several minor fixes to allow use of Iris on Windows.
+
* Made use of the new standard_parallels keyword in Cartopy's LambertConformal
projection (Cartopy v0.12). Older versions of Iris will not be able to
create LambertConformal coordinate systems with Cartopy >= 0.12.
+
Incompatible changes
====================
+
* Saving a cube with a STASH attribute to NetCDF now produces a variable
with an attribute of "um_stash_source" rather than "ukmo__um_stash_source".
-* Cubes saved to NetCDF with a coordinate system referencing a spherical ellipsoid
- now result in the grid mapping variable containing only the "earth_radius" attribute,
- rather than the "semi_major_axis" and "semi_minor_axis".
-* Collapsing a cube over all of its dimensions now results in a scalar cube rather
- than a 1d cube.
+
+* Cubes saved to NetCDF with a coordinate system referencing a spherical
+ ellipsoid now result in the grid mapping variable containing only the
+ "earth_radius" attribute, rather than the "semi_major_axis" and
+ "semi_minor_axis".
+
+* Collapsing a cube over all of its dimensions now results in a scalar cube
+ rather than a 1d cube.
+
Deprecations
============
+
* :func:`iris.util.ensure_array` has been deprecated.
+
* Deprecated the :func:`iris.fileformats.pp.reset_load_rules` and
:func:`iris.fileformats.grib.reset_load_rules` functions.
+
* Matplotlib is no longer a core Iris dependency.
-Documentation Changes
-=====================
+
+Documentation
+=============
+
* New sections on :ref:`cube broadcasting ` and
:doc:`regridding and interpolation `
have been added to the :doc:`user guide `.
+
* An example demonstrating custom log-scale colouring has been added.
- See :ref:`General-anomaly_log_colouring`.
+ See :ref:`sphx_glr_generated_gallery_general_plot_anomaly_log_colouring.py`.
+
* An example demonstrating the creation of a custom
:class:`iris.analysis.Aggregator` has been added.
- See :ref:`General-custom_aggregation`.
+ See :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py`.
+
* An example of reprojecting data from 2D auxiliary spatial coordinates
- (such as that from the ORCA grid) has been added. See :ref:`General-orca_projection`.
-* A clarification of the behaviour of :func:`iris.analysis.calculus.differentiate`.
-* A new :doc:`"whitepapers" ` section has been added to the documentation along
- with the addition of a paper providing an :doc:`overview of the load process for UM-like
- fileformats (e.g. PP and Fieldsfile) `.
+ (such as that from the ORCA grid) has been added. See
+ :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py`.
+
+* A clarification of the behaviour of
+ :func:`iris.analysis.calculus.differentiate`.
+* A new :doc:`"Technical Papers" ` section has been added to
+  the documentation, along with a paper providing an
+  :doc:`overview of the load process for UM-like fileformats (e.g. PP and Fieldsfile) `.
diff --git a/docs/iris/src/whatsnew/1.8.rst b/docs/iris/src/whatsnew/1.8.rst
index c763411ed8..17432d7267 100644
--- a/docs/iris/src/whatsnew/1.8.rst
+++ b/docs/iris/src/whatsnew/1.8.rst
@@ -1,14 +1,17 @@
-What's new in Iris 1.8
-**********************
+v1.8 (14 Apr 2015)
+******************
-:Release: 1.8.1
-:Date: 3rd June 2015
-
-This document explains the new/changed features of Iris in version 1.8.
+This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
-Iris 1.8 features
-=================
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
.. _showcase:
@@ -38,14 +41,17 @@ Iris 1.8 features
.. admonition:: Showcase: Slices over a coordinate
- You can slice over one or more dimensions of a cube using :meth:`iris.cube.Cube.slices_over`.
- This provides similar functionality to :meth:`~iris.cube.Cube.slices` but with
- almost the opposite outcome.
+ You can slice over one or more dimensions of a cube using
+ :meth:`iris.cube.Cube.slices_over`.
+ This provides similar functionality to :meth:`~iris.cube.Cube.slices`
+ but with almost the opposite outcome.
- Using :meth:`~iris.cube.Cube.slices` to slice a cube on a selected dimension returns
- all possible slices of the cube with the selected dimension retaining its dimensionality.
- Using :meth:`~iris.cube.Cube.slices_over` to slice a cube on a selected
- dimension returns all possible slices of the cube over the selected dimension.
+ Using :meth:`~iris.cube.Cube.slices` to slice a cube on a selected
+ dimension returns all possible slices of the cube with the selected
+ dimension retaining its dimensionality. Using
+ :meth:`~iris.cube.Cube.slices_over` to slice a cube on a selected
+ dimension returns all possible slices of the cube over the selected
+ dimension.
To demonstrate this::
@@ -60,42 +66,65 @@ Iris 1.8 features
air_potential_temperature / (K) (model_level_number: 10; grid_latitude: 83; grid_longitude: 83)
-* :func:`iris.cube.CubeList.concatenate` now works with `biggus `_ arrays and so
+* :func:`iris.cube.CubeList.concatenate` now works with
+ `biggus `_ arrays and so
now supports concatenation of cubes with deferred data.
+
+* Improvements to NetCDF saving through the use of biggus:
* A cube's lazy data payload will still be lazy after saving; the data will not
be loaded into memory by the save operation.
+
* Cubes with data payloads larger than system memory can now be saved to NetCDF
through biggus streaming the data to disk.
-* :func:`iris.util.demote_dim_coord_to_aux_coord` and :func:`iris.util.promote_aux_coord_to_dim_coord`
+* :func:`iris.util.demote_dim_coord_to_aux_coord` and
+ :func:`iris.util.promote_aux_coord_to_dim_coord`
allow a coordinate to be easily demoted or promoted within a cube.
-* :func:`iris.util.squeeze` removes all length 1 dimensions from a cube, and demotes
- any associated squeeze dimension :class:`~iris.coords.DimCoord` to be a scalar coordinate.
-* :meth:`iris.cube.Cube.slices_over`, which returns an iterator of all sub-cubes along a given
- coordinate or dimension index.
+
+* :func:`iris.util.squeeze` removes all length 1 dimensions from a cube, and
+ demotes any associated squeeze dimension :class:`~iris.coords.DimCoord` to be
+ a scalar coordinate.
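+
+  For example (a sketch; ``cube`` is assumed to have a length-one 'time'
+  dimension):
+
+  .. code-block:: python
+
+      import iris.util
+
+      # 'time' is demoted to a scalar coordinate on the squeezed cube.
+      squeezed_cube = iris.util.squeeze(cube)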
+
+* :meth:`iris.cube.Cube.slices_over`, which returns an iterator of all
+ sub-cubes along a given coordinate or dimension index.
+
* :meth:`iris.cube.Cube.interpolate` now accepts datetime.datetime and
netcdftime.datetime instances for date or time coordinates.
-* Many new and updated translations between CF spec and STASH codes or GRIB2 parameter
- codes.
-* PP/FF loader creates a height coordinate at 1.5m or 10m for certain relevant stash codes.
-* Lazy aggregator support for the :class:`standard deviation `
- aggregator has been added.
-* A speed improvement in calculation of :func:`iris.analysis.cartography.area_weights`.
-* Experimental support for unstructured grids has been added with :func:`iris.experimental.ugrid`.
- This has been implemented using `UGRID `_.
-* :meth:`iris.cube.CubeList.extract_overlapping` supports extraction of cubes over
- regions where common coordinates overlap, over multiple coordinates.
+
+* Many new and updated translations between CF spec and STASH codes or GRIB2
+ parameter codes.
+
+* PP/FF loader creates a height coordinate at 1.5m or 10m for certain relevant
+  STASH codes.
+
+* Lazy aggregator support for the
+ :class:`standard deviation ` aggregator has been added.
+
+* A speed improvement in calculation of
+ :func:`iris.analysis.cartography.area_weights`.
+
+* Experimental support for unstructured grids has been added with
+ :func:`iris.experimental.ugrid`. This has been implemented using
+ `UGRID `_.
+
+* :meth:`iris.cube.CubeList.extract_overlapping` supports extraction of cubes
+ over regions where common coordinates overlap, over multiple coordinates.
+
* Warnings raised due to invalid units in loaded data have been suppressed.
-* Experimental low-level read and write access for FieldsFile variants is now supported
- via :class:`iris.experimental.um.FieldsFileVariant`.
+
+* Experimental low-level read and write access for FieldsFile variants is now
+ supported via :class:`iris.experimental.um.FieldsFileVariant`.
+
* PP loader will return cubes for all fields prior to a field with a problematic
header before raising an exception.
-* NetCDF loader skips invalid global attributes, raising a warning rather than raising an
- exception.
+
+* NetCDF loader skips invalid global attributes, raising a warning rather than
+ raising an exception.
+
* A warning is now raised rather than an exception when constructing an
:class:`~iris.aux_factory.AuxCoordFactory` fails.
+
* Supported :class:`aux coordinate factories `
have been extended to include:
@@ -104,78 +133,104 @@ Iris 1.8 features
* ``ocean s coordinate, generic form 1``, and
* ``ocean s coordinate, generic form 2``.
-* :meth:`iris.cube.Cube.intersection` now supports taking a points-only intersection.
- Any bounds on intersected coordinates are ignored but retained.
+* :meth:`iris.cube.Cube.intersection` now supports taking a points-only
+ intersection. Any bounds on intersected coordinates are ignored but retained.
+
* The FF loader's known handled grids now includes ``Grid 21``.
-* A :class:`nearest neighbour ` scheme is now provided for
- :meth:`iris.cube.Cube.interpolate` and :meth:`iris.cube.Cube.regrid`.
-* :func:`iris.analysis.cartography.rotate_winds` supports transformation of wind vectors
- to a different coordinate system.
+
+* A :class:`nearest neighbour ` scheme is now provided
+ for :meth:`iris.cube.Cube.interpolate` and :meth:`iris.cube.Cube.regrid`.
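+
+  For example (a sketch, assuming the new scheme is
+  :class:`iris.analysis.Nearest` and that ``cube`` has a 'longitude'
+  coordinate):
+
+  .. code-block:: python
+
+      import iris.analysis
+
+      # Nearest-neighbour interpolation at a single longitude.
+      result = cube.interpolate([('longitude', [0.5])],
+                                iris.analysis.Nearest())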
+
+* :func:`iris.analysis.cartography.rotate_winds` supports transformation of
+ wind vectors to a different coordinate system.
+
* NumPy universal functions can now be applied to cubes using
:func:`iris.analysis.maths.apply_ufunc`.
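+
+  For example (a sketch; ``cube`` is assumed to hold values in radians):
+
+  .. code-block:: python
+
+      import numpy as np
+      import iris.analysis.maths
+
+      # Apply a NumPy ufunc element-wise, returning a new cube.
+      sin_cube = iris.analysis.maths.apply_ufunc(np.sin, cube)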
+
* Generic functions can be applied to :class:`~iris.cube.Cube` instances using
:class:`iris.analysis.maths.IFunc`.
-* The :class:`iris.analysis.Linear` scheme now supports regridding as well as interpolation.
- This enables :meth:`iris.cube.Cube.regrid` to perform bilinear regridding, which now
- replaces the experimental routine "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid".
+
+* The :class:`iris.analysis.Linear` scheme now supports regridding as well as
+ interpolation. This enables :meth:`iris.cube.Cube.regrid` to perform bilinear
+ regridding, which now replaces the experimental routine
+ "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid".
+
Bugs fixed
==========
-1.8.0
-------
* Fix in netCDF loader to correctly determine whether the longitude coordinate
(including scalar coordinates) is circular.
-* :meth:`iris.cube.Cube.intersection` now supports bounds that extend slightly beyond 360
- degrees.
-* Lateral Boundary Condition (LBC) type FieldFiles are now handled correctly by the FF loader.
-* Making a copy of a scalar cube with no data now correctly copies the data array.
-* Height coordinates in NAME trajectory output files have been changed to match other
- NAME output file formats.
+
+* :meth:`iris.cube.Cube.intersection` now supports bounds that extend slightly
+ beyond 360 degrees.
+
+* Lateral Boundary Condition (LBC) type FieldFiles are now handled correctly by
+ the FF loader.
+
+* Making a copy of a scalar cube with no data now correctly copies the data
+ array.
+
+* Height coordinates in NAME trajectory output files have been changed to match
+ other NAME output file formats.
+
* Fixed datatype when loading an ``integer_constants`` array from a FieldsFile.
+
* FF/PP loader adds appropriate cell methods for ``lbtim.ib = 3`` intervals.
+
* An exception is raised if the units of the latitude and longitude coordinates
of the cube passed into :func:`iris.analysis.cartography.area_weights` are not
convertible to radians.
+
* GRIB1 loader now creates a time coordinate for a time range indicator of 2.
+
* NetCDF loader now loads units that are empty strings as dimensionless.
-1.8.1
-------
-* The PP loader now carefully handles floating point errors in date time conversions to hours.
-* The handling fill values for lazy data loaded from NetCDF files is altered, such that the
- _FillValue set in the file is preserved through lazy operations.
-* The risk that cube intersections could return incorrect results due to floating point
- tolerances is reduced.
-* The new GRIB2 loading code is altered to enable the loading of various data representation
- templates; the data value unpacking is handled by the GRIB API.
-* Saving cube collections to NetCDF, where multiple similar aux-factories exist within the cubes,
- is now carefully handled such that extra file variables are created where required in some cases.
-
-1.8.2
------
-* A fix to prevent the error: *AttributeError: 'module' object has no attribute 'date2num'*.
- This was caused by the function :func:`netcdftime.date2num` being removed from the netCDF4
- package in recent versions.
+
+v1.8.1 (03 Jun 2015)
+--------------------
+
+* The PP loader now carefully handles floating point errors in date time
+ conversions to hours.
+
+* The handling of fill values for lazy data loaded from NetCDF files is
+  altered, such that the _FillValue set in the file is preserved through lazy
+  operations.
+
+* The risk that cube intersections could return incorrect results due to
+ floating point tolerances is reduced.
+
+* The new GRIB2 loading code is altered to enable the loading of various data
+ representation templates; the data value unpacking is handled by the GRIB API.
+
+* Saving cube collections to NetCDF, where multiple similar aux-factories exist
+ within the cubes, is now carefully handled such that extra file variables are
+ created where required in some cases.
+
Deprecations
============
+
* The original GRIB loader has been deprecated and replaced with a new
template-based GRIB loader.
+
* Deprecated default NetCDF save behaviour of assigning the outermost
dimension to be unlimited. Switch to the new behaviour with no auto
assignment by setting :data:`iris.FUTURE.netcdf_no_unlimited` to True.
+
* The former experimental method
- "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid" has been removed, as
- :class:`iris.analysis.Linear` now includes this functionality.
-
-Documentation Changes
-=====================
-* A chapter on :doc:`merge and concatenate ` has been
- added to the :doc:`user guide `.
-* A section on installing Iris using `conda `_ has been
- added to the :doc:`install guide `.
+ "iris.experimental.regrid.regrid_bilinear_rectilinear_src_and_grid" has been
+ removed, as :class:`iris.analysis.Linear` now includes this functionality.
+
+
+Documentation
+=============
+
+* A chapter on :doc:`merge and concatenate ` has
+ been added to the :doc:`user guide `.
+
+* A section on installing Iris using `conda `_ has
+ been added to the :doc:`install guide `.
+
* Updates to the chapter on
:doc:`regridding and interpolation `
have been added to the :doc:`user guide `.
-
diff --git a/docs/iris/src/whatsnew/1.9.rst b/docs/iris/src/whatsnew/1.9.rst
index 7a4848b434..77d03b5de3 100644
--- a/docs/iris/src/whatsnew/1.9.rst
+++ b/docs/iris/src/whatsnew/1.9.rst
@@ -1,32 +1,48 @@
-What's New in Iris 1.9
-**********************
+v1.9 (10 Dec 2015)
+******************
-:Release: 1.9.2
-:Date: 28th January 2016
-
-This document explains the new/changed features of Iris in version 1.9
+This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
-Iris 1.9 Features
-=================
-* Support for running on Python 3.4 has been added to the whole code base. Some features which
- depend on external libraries will not be available until they also support Python 3, namely:
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
+* Support for running on Python 3.4 has been added to the whole code base.
+ Some features which depend on external libraries will not be available until
+ they also support Python 3, namely:
* gribapi does not yet provide a Python 3 interface
-* Added the UM pseudo level type to the information made available in the STASH_TRANS table in :mod:`iris.fileformats.um._ff_cross_references`
-* When reading "cell_methods" attributes from NetCDF files, allow optional whitespace before the colon.
- This is not strictly in the CF spec, but is a common occurrence.
-* Basic cube arithemetic (plus, minus, times, divide) now supports lazy evaluation.
-* :meth:`iris.analysis.cartography.rotate_winds` can now operate much faster on multi-layer (i.e. > 2-dimensional) cubes,
- as it calculates rotation coefficients only once and reuses them for additional layers.
+* Added the UM pseudo level type to the information made available in the
+  STASH_TRANS table in :mod:`iris.fileformats.um._ff_cross_references`.
+
+* When reading "cell_methods" attributes from NetCDF files, allow optional
+ whitespace before the colon. This is not strictly in the CF spec, but is a
+ common occurrence.
-* Linear regridding of a multi-layer (i.e. > 2-dimensional) cube is now much faster,
- as it calculates transform coefficients just once and reuses them for additional layers.
-* Ensemble statistics can now be saved to GRIB2, using Product Definition Template 4.11.
+* Basic cube arithmetic (plus, minus, times, divide) now supports lazy
+ evaluation.
-* Loading of NetCDF data with ocean vertical coordinates now returns a 'depth' in addition to an 'eta' cube.
- This operates on specific defined dimensionless coordinates : see CF spec version 1.6, Appendix D.
+* :meth:`iris.analysis.cartography.rotate_winds` can now operate much faster
+ on multi-layer (i.e. > 2-dimensional) cubes, as it calculates rotation
+ coefficients only once and reuses them for additional layers.
+
+* Linear regridding of a multi-layer (i.e. > 2-dimensional) cube is now much
+ faster, as it calculates transform coefficients just once and reuses them for
+ additional layers.
+
+* Ensemble statistics can now be saved to GRIB2, using Product Definition
+ Template 4.11.
+
+* Loading of NetCDF data with ocean vertical coordinates now returns a 'depth'
+ in addition to an 'eta' cube. This operates on specific defined
+  dimensionless coordinates: see CF spec version 1.6, Appendix D.
* :func:`iris.analysis.stats.pearsonr` updates:
@@ -37,14 +53,21 @@ Iris 1.9 Features
* Accepts common_mask keyword for restricting calculation to unmasked pairs of
cells.
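+
+  A sketch of the updated call (``cube_a`` and ``cube_b`` are assumed to be
+  two compatible cubes sharing a 'time' coordinate):
+
+  .. code-block:: python
+
+      from iris.analysis.stats import pearsonr
+
+      # Correlate over 'time' only, ignoring any pair of cells in which
+      # either input is masked.
+      corr_cube = pearsonr(cube_a, cube_b, corr_coords=['time'],
+                           common_mask=True)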
-* Added a new point-in-cell regridding scheme, :class:`iris.experimental.regrid.PointInCell`.
-* Added :meth:`iris.analysis.WPERCENTILE` - a new weighted aggregator for calculating
- percentiles.
-* Added cell-method translations for LBPROC=64 and 192 in UM files, encoding 'zonal mean' and 'zonal+time mean'.
+* Added a new point-in-cell regridding scheme,
+ :class:`iris.experimental.regrid.PointInCell`.
+
+* Added :meth:`iris.analysis.WPERCENTILE` - a new weighted aggregator for
+ calculating percentiles.
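+
+  For example (a sketch; area weights are assumed, which requires bounded
+  latitude/longitude coordinates):
+
+  .. code-block:: python
+
+      import iris.analysis
+      import iris.analysis.cartography
+
+      # Weighted median (50th percentile) over latitude.
+      weights = iris.analysis.cartography.area_weights(cube)
+      median_cube = cube.collapsed('latitude', iris.analysis.WPERCENTILE,
+                                   percent=50, weights=weights)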
+
+* Added cell-method translations for LBPROC=64 and 192 in UM files, encoding
+ 'zonal mean' and 'zonal+time mean'.
+
+* Support for loading GRIB2 messages defined on a Lambert conformal grid has
+ been added to the GRIB2 loader.
+
+* Data on potential-temperature (theta) levels can now be saved to GRIB2, with
+ a fixed surface type of 107.
-* Support for loading GRIB2 messages defined on a Lambert conformal grid has been added to
- the GRIB2 loader.
-* Data on potential-temperature (theta) levels can now be saved to GRIB2, with a fixed surface type of 107.
* Added several new helper functions for file-save customisation
  (see also: :doc:`Saving Iris Cubes `):
@@ -54,76 +77,130 @@ Iris 1.9 Features
* :meth:`iris.fileformats.pp.as_pairs`
* :meth:`iris.fileformats.pp.as_fields`
* :meth:`iris.fileformats.pp.save_fields`
-* Loading data from GRIB2 now supports most of the currently defined 'data representation templates' :
- code numbers 0, 1, 2, 3, 4, 40, 41, 50, 51 and 61.
-* When a Fieldsfile is opened for update as a :class:`iris.experimental.um.FieldsFileVariant`,
- unmodified packed data in the file can now be retained in the original form.
- Previously it could only be stored in an unpacked form.
+
+* Loading data from GRIB2 now supports most of the currently defined 'data
+  representation templates': code numbers 0, 1, 2, 3, 4, 40, 41, 50, 51 and 61.
+
+* When a Fieldsfile is opened for update as a
+ :class:`iris.experimental.um.FieldsFileVariant`, unmodified packed data in
+ the file can now be retained in the original form. Previously it could only
+ be stored in an unpacked form.
+
* When reading and writing NetCDF data, the CF 'flag' attributes,
- "flag_masks", "flag_meanings" and "flag_values" are now preserved through Iris load and save.
-* `mo_pack `_ was added as an optional dependency.
+ "flag_masks", "flag_meanings" and "flag_values" are now preserved through
+ Iris load and save.
+
+* `mo_pack `_ was added as an optional
+ dependency.
It is used to encode and decode data in WGDOS packed form.
-* The :meth:`iris.experimental.um.Field.get_data` method can now be used to read Fieldsfile data
- after the original :class:`iris.experimental.um.FieldsFileVariant` has been closed.
-Bugs Fixed
+* The :meth:`iris.experimental.um.Field.get_data` method can now be used to
+ read Fieldsfile data after the original
+ :class:`iris.experimental.um.FieldsFileVariant` has been closed.
+
+Bugs fixed
==========
+
* Fixed a bug in :meth:`iris.unit.Unit.convert`
(and the equivalent in `cf_units `_)
- so that it now converts data to the native endianness, without which udunits could not read it correctly.
+ so that it now converts data to the native endianness, without which udunits
+ could not read it correctly.
+
* Fixed a bug with loading WGDOS packed data in :mod:`iris.experimental.um`,
which could occasionally crash, with some data.
-* Ignore non-numeric suffices in the numpy version string, which would otherwise crash some regridding routines.
+
+* Ignore non-numeric suffixes in the numpy version string, which would
+ otherwise crash some regridding routines.
+
* Fixed a bug in :mod:`iris.fileformats.um_cf_map` where the standard name
- for the stash code m01s12i187 was incorrectly set, such that it is inconsistent
- with the stated unit of measure, 'm s-1'. The different name, a long_name
- of 'change_over_time_in_upward_air_velocity_due_to_advection' with
+ for the stash code m01s12i187 was incorrectly set, such that it is
+ inconsistent with the stated unit of measure, 'm s-1'. The different name,
+ a long_name of 'change_over_time_in_upward_air_velocity_due_to_advection' with
units of 'm s-1' is now used instead.
+
* Fixed a bug in :meth:`iris.cube.Cube.intersection`.
- When edge points were at (base + period), intersection would unnecessarily wrap the data.
+ When edge points were at (base + period), intersection would unnecessarily
+ wrap the data.
+
* Fixed a bug in :mod:`iris.fileformats.pp`.
- A previous release removed the ability to pass a partial constraint on STASH attribute.
-* :meth:`iris.plot.default_projection_extent` now correctly raises an exception if a cube has X bounds but no Y bounds, or vice versa.
- Previously it never failed this, as the test was wrong.
-* When loading NetCDF data, a "units" attribute containing unicode characters is now transformed by backslash-replacement.
- Previously this caused a crash. Note: unicode units are *not supported in the CF conventions*.
-* When saving to NetCDF, factory-derived auxiliary coordinates are now correctly saved with different names when they are not identical.
- Previously, such coordinates could be saved with the same name, leading to errors.
+ A previous release removed the ability to pass a partial constraint on STASH
+ attribute.
+
+* :meth:`iris.plot.default_projection_extent` now correctly raises an exception
+ if a cube has X bounds but no Y bounds, or vice versa. Previously it never
+ failed this, as the test was wrong.
+
+* When loading NetCDF data, a "units" attribute containing unicode characters
+ is now transformed by backslash-replacement. Previously this caused a crash.
+ Note: unicode units are *not supported in the CF conventions*.
+
+* When saving to NetCDF, factory-derived auxiliary coordinates are now correctly
+ saved with different names when they are not identical. Previously, such
+ coordinates could be saved with the same name, leading to errors.
+
* Fixed a bug in :meth:`iris.experimental.um.FieldsFileVariant.close`,
which now correctly allocates extra blocks for larger lookups when saving.
- Previously, when larger files open for update were closed, they could be written out with data overlapping the lookup table.
+ Previously, when larger files open for update were closed, they could be
+ written out with data overlapping the lookup table.
+
* Fixed a bug in :class:`iris.aux_factory.OceanSigmaZFactory`
- which sometimes caused crashes when fetching the points of an "ocean sigma z" coordinate.
+ which sometimes caused crashes when fetching the points of an "ocean sigma z"
+ coordinate.
+
-Version 1.9.1
--------------
-* Fixed a unicode bug preventing standard names from being built cleanly when installing in Python3
+v1.9.1 (05 Jan 2016)
+--------------------
+
+* Fixed a unicode bug preventing standard names from being built cleanly when
+  installing in Python3.
+
+
+v1.9.2 (28 Jan 2016)
+--------------------
+
+* New warning regarding data loss if writing to an open file which is also
+ open to read, with lazy data.
-Version 1.9.2
--------------
-* New warning regarding data loss if writing to an open file which is also open to read, with lazy data.
* Removal of a warning about data payload loading from concatenate.
+
* Updates to concatenate documentation.
+
* Fixed a bug with a name change in the netcdf4-python package.
+
* Fixed a bug building the documentation examples.
-* Fixed a bug avoiding sorting classes directly when :meth:`iris.cube.Cube.coord_system` is used in Python3.
+
+* Fixed a bug by avoiding sorting classes directly when
+ :meth:`iris.cube.Cube.coord_system` is used in Python3.
+
* Fixed a bug regarding unsuccessful dot import.
-Incompatible Changes
+
+Incompatible changes
====================
-* GRIB message/file reading and writing may not be available for Python 3 due to GRIB API limitations.
+
+* GRIB message/file reading and writing may not be available for Python 3 due
+ to GRIB API limitations.
+
Deprecations
============
-* Deprecated :mod:`iris.unit`, with unit functionality provided by `cf_units `_ instead.
-* When loading from NetCDF, a deprecation warning is emitted if there is vertical coordinate information
- that *would* produce extra result cubes if :data:`iris.FUTURE.netcdf_promote` were set,
- but it is *not* set.
+
+* Deprecated :mod:`iris.unit`, with unit functionality provided by
+ `cf_units `_ instead.
+
+* When loading from NetCDF, a deprecation warning is emitted if there is
+ vertical coordinate information that *would* produce extra result cubes if
+ :data:`iris.FUTURE.netcdf_promote` were set, but it is *not* set.
+
* Deprecated :class:`iris.aux_factory.LazyArray`
-Documentation Changes
-=====================
+
+Documentation
+=============
+
* A chapter on :doc:`saving iris cubes ` has been
added to the :doc:`user guide `.
-* Added script and documentation for building a what's new page from developer-submitted contributions.
- See :doc:`Contributing a "What's New" entry `.
+
+* Added script and documentation for building a what's new page from
+ developer-submitted contributions. See
+ :doc:`Contributing a "What's New" entry `.
diff --git a/docs/iris/src/whatsnew/2.0.rst b/docs/iris/src/whatsnew/2.0.rst
index 43d60a8539..577e8fea22 100644
--- a/docs/iris/src/whatsnew/2.0.rst
+++ b/docs/iris/src/whatsnew/2.0.rst
@@ -1,16 +1,18 @@
-What's New in Iris 2.0.0
-************************
+v2.0 (14 Feb 2018)
+******************
-:Release: 2.0.0rc1
-:Date: 2018-01-11
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
-This document explains the new/changed features of Iris in version 2.0.0
-(:doc:`View all changes `).
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
-Iris 2.0.0 Features
-===================
+Features
+========
+
.. _showcase:
.. admonition:: Dask Integration
@@ -114,7 +116,7 @@ all existing toggles in :attr:`iris.FUTURE` now default to :data:`True`.
off is now deprecated.
-Bugs Fixed
+Bugs fixed
==========
* Indexing or slicing an :class:`~iris.coords.AuxCoord` coordinate will return a coordinate with
@@ -209,8 +211,8 @@ Incompatible Changes
printed as ``m.s-1``.
-Deprecation removals
---------------------
+Deprecation
+===========
All deprecated functionality that was announced for removal in Iris 2.0 has
been removed. In particular:
@@ -289,8 +291,8 @@ been removed. In particular:
removed from the :class:`iris.fileformats.rules.Loader` constructor.
-Documentation Changes
-=====================
+Documentation
+=============
* A new UserGuide chapter on :doc:`Real and Lazy Data
` has been added, and referenced from key
diff --git a/docs/iris/src/whatsnew/2.1.rst b/docs/iris/src/whatsnew/2.1.rst
index 00f7115431..311e8c251b 100644
--- a/docs/iris/src/whatsnew/2.1.rst
+++ b/docs/iris/src/whatsnew/2.1.rst
@@ -1,37 +1,17 @@
-What's New in Iris 2.1
-**********************
+v2.1 (06 Jun 2018)
+******************
-:Release: 2.1
-:Date: 2018-06-06
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
-This document explains the new/changed features of Iris in version 2.1
-(:doc:`older "What's New" release notes can be found here`.)
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
-Iris 2.1 Dependency updates
-===========================
-* The `cf_units `_ dependency
- was updated to cf_units ``v2.0``.
- cf_units v2 is almost entirely backwards compatible with v1.
- However the ability to preserve some aliased calendars has been removed.
- For this reason, it is possible that NetCDF load of a variable with a
- "standard" calendar will result in a saved NetCDF of a "gregorian"
- calendar.
-* Iris updated its time-handling functionality from the
- `netcdf4-python `_
- ``netcdftime`` implementation to the standalone module
- `cftime `_.
- cftime is entirely compatible with netcdftime, but some issues may
- occur where users are constructing their own datetime objects.
- In this situation, simply replacing ``netcdftime.datetime`` with
- ``cftime.datetime`` should be sufficient.
-* Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy.
- Full requirements can be seen in the `requirements `_
- directory of the Iris' the source.
-
-Iris 2.1 Features
-=================
+Features
+========
* Added ``repr_html`` functionality to the :class:`~iris.cube.Cube` to provide
a rich html representation of cubes in Jupyter notebooks. Existing functionality
@@ -42,42 +22,81 @@ Iris 2.1 Features
* Updated :func:`iris.cube.Cube.name` to return a STASH code if the cube has
one and no other valid names are present. This is now consistent with the
summary information from :func:`iris.cube.Cube.summary`.
+
* The partial collapse of multi-dimensional auxiliary coordinates is now
supported. Collapsed bounds span the range of the collapsed dimension(s).
+
* Added new function :func:`iris.cube.CubeList.realise_data` to compute
multiple lazy values in a single operation, avoiding repeated re-loading of
data or re-calculation of expressions.
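+
+  A minimal usage sketch (``cube_a`` and ``cube_b`` are hypothetical cubes
+  with lazy data)::
+
+      import iris
+
+      cubes = iris.cube.CubeList([cube_a, cube_b])
+      # Compute both lazy data payloads in a single combined operation.
+      cubes.realise_data()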
+
* The methods :meth:`iris.cube.Cube.convert_units` and
:meth:`iris.coords.Coord.convert_units` no longer forcibly realise the cube
data or coordinate points/bounds. The converted values are now lazy arrays
if the originals were.
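+
+  For example (a sketch, assuming ``cube`` holds lazy data in kelvin)::
+
+      cube.convert_units('celsius')
+      assert cube.has_lazy_data()  # the conversion stays lazy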
+
* Added :meth:`iris.analysis.trajectory.interpolate` that allows you to
interpolate to find values along a trajectory.
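+
+  A usage sketch (the waypoint values are illustrative)::
+
+      from iris.analysis.trajectory import interpolate
+
+      waypoints = [('latitude', [50.0, 51.5, 53.0]),
+                   ('longitude', [-3.0, -2.0, -1.0])]
+      traj_cube = interpolate(cube, waypoints)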
+
* It is now possible to add an attribute of ``missing_value`` to a cube
(:issue:`1588`).
+
* Iris can now represent data on the Albers Equal Area Projection,
and the NetCDF loader and saver were updated to handle this. (:issue:`2943`)
+
* The :class:`~iris.coord_systems.Mercator` projection has been updated to accept
the ``standard_parallel`` keyword argument (:pull:`3041`).
-Bugs Fixed
+
+Bugs fixed
==========
* All var names being written to NetCDF are now CF compliant.
Non alpha-numeric characters are replaced with '_', and var names now always
have a leading letter (:pull:`2930`).
+
* A cube resulting from a regrid operation using the `iris.analysis.AreaWeighted`
regridding scheme will now have the smallest floating point data type
to which the source cube's data type can be safely converted using NumPy's
type promotion rules.
+
* :mod:`iris.quickplot` labels now honour the axes being drawn to when using the
``axes`` keyword (:pull:`3010`).
-Incompatible Changes
+
+Incompatible changes
====================
+
* The deprecated :mod:`iris.experimental.um` was removed.
Please consider using `mule `_
as an alternative.
+
* This release of Iris contains a number of updated metadata translations.
- See [this changelist](https://github.com/SciTools/iris/commit/69597eb3d8501ff16ee3d56aef1f7b8f1c2bb316#diff-1680206bdc5cfaa83e14428f5ba0f848)
+ See this
+ `changelist `_
for further information.
+
+
+Internal
+========
+
+* The `cf_units `_ dependency
+ was updated to cf_units ``v2.0``.
+ cf_units v2 is almost entirely backwards compatible with v1.
+ However the ability to preserve some aliased calendars has been removed.
+ For this reason, it is possible that NetCDF load of a variable with a
+ "standard" calendar will result in a saved NetCDF of a "gregorian"
+ calendar.
+
+* Iris updated its time-handling functionality from the
+ `netcdf4-python `_
+ ``netcdftime`` implementation to the standalone module
+ `cftime `_.
+ cftime is entirely compatible with netcdftime, but some issues may
+ occur where users are constructing their own datetime objects.
+ In this situation, simply replacing ``netcdftime.datetime`` with
+ ``cftime.datetime`` should be sufficient.
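+
+  For example, a sketch of the one-line substitution::
+
+      # Previously: dt = netcdftime.datetime(2018, 6, 6)
+      import cftime
+
+      dt = cftime.datetime(2018, 6, 6)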
+
+* Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy.
+ Full requirements can be seen in the `requirements `_
+ directory of the Iris source.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/2.2.rst b/docs/iris/src/whatsnew/2.2.rst
index 1eff99ecb4..314f84355f 100644
--- a/docs/iris/src/whatsnew/2.2.rst
+++ b/docs/iris/src/whatsnew/2.2.rst
@@ -1,17 +1,18 @@
-What's New in Iris 2.2
-************************
+v2.2 (11 Oct 2018)
+******************
-:Release: 2.2.0
-:Date:
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
-This document explains the new/changed features of Iris in the release
-of version 2.2
-(:doc:`View all changes `).
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
-Iris 2.2 Features
-===================
+Features
+========
+
.. _showcase:
.. admonition:: 2-Dimensional Coordinate Plotting
@@ -70,19 +71,7 @@ Iris 2.2 Features
a NaN-tolerant array comparison.
-Iris 2.2 Dependency updates
-=============================
-
-* Iris is now using the latest version release of dask (currently 0.19.3)
-
-* Proj4 has been temporarily pinned to version < 5 while problems with the
- Mollweide projection are addressed.
-
-* Matplotlib has been pinned to version < 3 temporarily while we account for
- its changes in all SciTools libraries.
-
-
-Bugs Fixed
+Bugs fixed
==========
* The bug has been fixed that prevented printing time coordinates with bounds
@@ -93,7 +82,7 @@ Bugs Fixed
bound data is actually masked.
-Bugs fixed in v2.2.1
+v2.2.1 (28 May 2019)
--------------------
* Iris can now correctly unpack a column of header objects when saving a
@@ -108,9 +97,20 @@ Bugs fixed in v2.2.1
floating-point arithmetic.
+Internal
+========
+
+* Iris is now using the latest release of dask (currently 0.19.3).
+
+* Proj4 has been temporarily pinned to version < 5 while problems with the
+ Mollweide projection are addressed.
+
+* Matplotlib has been pinned to version < 3 temporarily while we account for
+ its changes in all SciTools libraries.
+
-Documentation Changes
-=====================
+Documentation
+=============
* Iris' `INSTALL` document has been updated to include guidance for running
tests.
diff --git a/docs/iris/src/whatsnew/2.3.rst b/docs/iris/src/whatsnew/2.3.rst
index 872fb44cd6..914d86fda2 100644
--- a/docs/iris/src/whatsnew/2.3.rst
+++ b/docs/iris/src/whatsnew/2.3.rst
@@ -1,14 +1,18 @@
-What's New in Iris 2.3.0
-************************
+v2.3 (19 Dec 2019)
+******************
-:Release: 2.3.0
-:Date: 2019-12-19
-
-This document explains the new/changed features of Iris in version 2.3.0
+This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
-Iris 2.3.0 Features
-===================
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
.. _showcase:
.. admonition:: Support for CF 1.7
@@ -81,7 +85,7 @@ Iris 2.3.0 Features
previously could produce a large number of small chunks. This had an adverse
effect on performance.
- In addition, Iris now takes its default chunksize from the default configured
+ In addition, Iris now takes its default chunk size from the default configured
in Dask itself, i.e. ``dask.config.get('array.chunk-size')``.
.. admonition:: Lazy Statistics
@@ -103,119 +107,158 @@ Iris 2.3.0 Features
relaxed
tolerance : This means that some cubes may now test 'equal' that previously
did not.
- Previously, Iris compared cube data arrays using:
- ``abs(a - b) < 1.e-8``
+ Previously, Iris compared cube data arrays using
+ ``abs(a - b) < 1.e-8``
We now apply the default operation of :func:`numpy.allclose` instead,
- which is equivalent to:
- ``abs(a - b) < (1.e-8 + 1.e-5 * b)``
+ which is equivalent to
+ ``abs(a - b) <= (1.e-8 + 1.e-5 * abs(b))``
* Added support to render HTML for :class:`~iris.cube.CubeList` in Jupyter
Notebooks and JupyterLab.
+
* Loading CellMeasures with integer values is now supported.
+
* New coordinate system: :class:`iris.coord_systems.Geostationary`,
including load and save support, based on the `CF Geostationary projection
definition `_.
+
* :class:`iris.coord_systems.VerticalPerspective` can now be saved to and
loaded from NetCDF files.
+
* :class:`iris.experimental.regrid.PointInCell` moved to
:class:`iris.analysis.PointInCell` to make this regridding scheme public
-* Iris now supports standard name modifiers. See `Appendix C, Standard Name Modifiers `_ for more information.
+
+* Iris now supports standard name modifiers. See
+ `Appendix C, Standard Name Modifiers `_
+ for more information.
+
* :meth:`iris.cube.Cube.remove_cell_measure` now also allows removal of a cell
measure by its name (previously only accepted a CellMeasure object).
+
* The :data:`iris.analysis.RMS` aggregator now supports a lazy calculation.
However, the "weights" keyword is not currently supported by this, so a
*weighted* calculation will still return a realised result, *and* force
realisation of the original cube data.
-* Iris now supports NetCDF Climate and Forecast (CF) Metadata Conventions 1.7 (see `CF 1.7 Conventions Document `_ for more information)
+
+* Iris now supports NetCDF Climate and Forecast (CF) Metadata Conventions 1.7
+ (see `CF 1.7 Conventions Document `_ for more information)
+
* Updated standard name support to
`CF standard name table version 70, 2019-12-10 `_
+
* Updated UM STASH translations to
`metarelate/metOcean commit 448f2ef, 2019-11-29 `_
-Iris 2.3.0 Dependency Updates
-=============================
-* Iris now supports Proj4 up to version 5, but not yet 6 or beyond, pending
- `fixes to some cartopy tests `_.
-* Iris now requires Dask >= 1.2 to allow for improved coordinate equality
- checks.
-
-
-Bugs Fixed
+Bugs fixed
==========
+
* Cube equality of boolean data is now handled correctly.
+
* Fixed a bug where cell measures were incorrect after a cube
:meth:`~iris.cube.Cube.transpose` operation. Previously, this resulted in
cell-measures that were no longer correctly mapped to the cube dimensions.
-* The :class:`~iris.coords.AuxCoord` disregarded masked points and bounds, as did the :class:`~iris.coords.DimCoord`.
- Fix permits an :class:`~iris.coords.AuxCoord` to contain masked points/bounds, and a TypeError exception is now
- raised when attempting to create or set the points/bounds of a
- :class:`~iris.coords.DimCoord` with arrays with missing points.
+
+* The :class:`~iris.coords.AuxCoord` disregarded masked points and bounds, as
+ did the :class:`~iris.coords.DimCoord`. Fix permits an
+ :class:`~iris.coords.AuxCoord` to contain masked points/bounds, and a
+ TypeError exception is now raised when attempting to create or set the
+ points/bounds of a :class:`~iris.coords.DimCoord` with arrays with missing
+ points.
+
* :class:`iris.coord_systems.VerticalPerspective` coordinate system now uses
the `CF Vertical perspective definition `_; had been
erroneously using Geostationary.
-* :class:`~iris.coords.CellMethod` will now only use valid `NetCDF name tokens `_ to reference the coordinates involved in the statistical operation.
-* The following var_name properties will now only allow valid `NetCDF name
- tokens
- `_ to
- reference the said NetCDF variable name. Note that names with a leading
+
+* :class:`~iris.coords.CellMethod` will now only use valid
+ `NetCDF name tokens`_ to reference the coordinates involved in the
+ statistical operation.
+
+* The following var_name properties will now only allow valid
+ `NetCDF name tokens`_
+ to reference the NetCDF variable name. Note that names with a leading
underscore are not permitted.
- - :attr:`iris.aux_factory.AuxCoordFactory.var_name`
- - :attr:`iris.coords.CellMeasure.var_name`
- - :attr:`iris.coords.Coord.var_name`
- - :attr:`iris.coords.AuxCoord.var_name`
- - :attr:`iris.cube.Cube.var_name`
+
+.. _NetCDF name tokens: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_data_set_components.html#object_name
+
+ * :attr:`iris.aux_factory.AuxCoordFactory.var_name`
+ * :attr:`iris.coords.CellMeasure.var_name`
+ * :attr:`iris.coords.Coord.var_name`
+ * :attr:`iris.coords.AuxCoord.var_name`
+ * :attr:`iris.cube.Cube.var_name`
+
* Rendering a cube in Jupyter will no longer crash for a cube with
attributes containing ``\n``.
+
* NetCDF variables which reference themselves in their ``cell_measures``
attribute can now be read.
+
* :func:`~iris.plot.quiver` now handles circular coordinates.
+
* The names of cubes loaded from abf/abl files have been corrected.
+
* Fixed a bug in UM file loading, where any landsea-mask-compressed fields
(i.e. with LBPACK=x2x) would cause an error later, when realising the data.
+
* :meth:`iris.cube.Cube.collapsed` now handles partial collapsing of
multidimensional coordinates that have bounds.
+
* Fixed a bug in the :data:`~iris.analysis.PROPORTION` aggregator, where cube
data in the form of a masked array with ``array.mask=False`` would cause an
error, but possibly only later when the values are actually realised.
( Note: since netCDF4 version 1.4.0, this is now a common form for data
loaded from netCDF files ).
+
* Fixed a bug where plotting a cube with a
:class:`iris.coord_systems.LambertConformal` coordinate system would result
in an error. This would happen if the coordinate system was defined with one
standard parallel, rather than two.
In these cases, a call to
:meth:`~iris.coord_systems.LambertConformal.as_cartopy_crs` would fail.
+
* :meth:`iris.cube.Cube.aggregated_by` now gives correct values in points and
bounds when handling multidimensional coordinates.
+
* Fixed a bug in the :meth:`iris.cube.Cube.collapsed` operation, which caused
the unexpected realization of any attached auxiliary coordinates that were
*bounded*. It now correctly produces a lazy result and does not realise
the original attached AuxCoords.
-Documentation Changes
-=====================
+Internal
+========
+
+* Iris now supports Proj4 up to version 5, but not yet 6 or beyond, pending
+ `fixes to some cartopy tests `_.
+
+* Iris now requires Dask >= 1.2 to allow for improved coordinate equality
+ checks.
+
+
+Documentation
+=============
+
* Adopted a
- `new colour logo for Iris <../_static/Iris7_1_trim_full.png>`_
-* Added a gallery example showing `how to concatenate NEMO ocean model data
- <../examples/Oceanography/load_nemo.html>`_.
-* Added an example in the
- `Loading Iris Cubes: Constraining on Time <../userguide/loading_iris_cubes
- .html#constraining-on-time>`_
- Userguide section, demonstrating how to load data within a specified date
+ `new colour logo for Iris `_
+
+* Added a gallery example showing how to concatenate NEMO ocean model data,
+ see :ref:`sphx_glr_generated_gallery_oceanography_plot_load_nemo.py`.
+
+* Added an example for loading Iris cubes for :ref:`using-time-constraints`
+ in the user guide, demonstrating how to load data within a specified date
range.
+
* Added notes to the :func:`iris.load` documentation, and the userguide
- `Loading Iris Cubes <../userguide/loading_iris_cubes.html>`_
- chapter, emphasizing that the *order* of the cubes returned by an iris load
- operation is effectively random and unstable, and should not be relied on.
+ :ref:`loading_iris_cubes` chapter, emphasizing that the *order* of the cubes
+ returned by an iris load operation is effectively random and unstable, and
+ should not be relied on.
+
* Fixed references in the documentation of
- :func:`iris.util.find_discontiguities` to a nonexistent
+ :func:`iris.util.find_discontiguities` to a nonexistent
"mask_discontiguities" routine : these now refer to
:func:`~iris.util.mask_cube`.
diff --git a/docs/iris/src/whatsnew/2.4.rst b/docs/iris/src/whatsnew/2.4.rst
index 2facb97a7a..ca7be20cd8 100644
--- a/docs/iris/src/whatsnew/2.4.rst
+++ b/docs/iris/src/whatsnew/2.4.rst
@@ -1,23 +1,25 @@
-What's New in Iris 2.4.0
-************************
+v2.4 (20 Feb 2020)
+******************
-:Release: 2.4.0
-:Date: 2020-02-20
-
-This document explains the new/changed features of Iris in version 2.4.0
+This document explains the changes made to Iris for this release
(:doc:`View all changes `.)
-Iris 2.4.0 Features
-===================
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
.. admonition:: Last python 2 version of Iris
- Iris 2.4 is a final extra release of Iris 2, which back-ports specific desired features from
- Iris 3 (not yet released).
+ Iris 2.4 is a final extra release of Iris 2, which back-ports specific
+ desired features from Iris 3 (not yet released).
- The purpose of this is both to support early adoption of certain newer features,
- and to provide a final release for Python 2.
+ The purpose of this is both to support early adoption of certain newer
+ features, and to provide a final release for Python 2.
The next release of Iris will be version 3.0 : a major-version release which
introduces breaking API and behavioural changes, and only supports Python 3.
@@ -25,35 +27,42 @@ Iris 2.4.0 Features
* :class:`iris.coord_systems.Geostationary` can now accept creation arguments of
`false_easting=None` or `false_northing=None`, equivalent to values of 0.
Previously these kwargs could be omitted, but could not be set to `None`.
- This also enables loading of netcdf data on a Geostationary grid, where either of these
- keys is not present as a grid-mapping variable property : Previously, loading any
- such data caused an exception.
-* The area weights used when performing area weighted regridding with :class:`iris.analysis.AreaWeighted`
- are now cached.
- This allows a significant speedup when regridding multiple similar cubes, by repeatedly using
- a `'regridder' object <../iris/iris/analysis.html?highlight=regridder#iris.analysis.AreaWeighted.regridder>`_
+ This also enables loading of netcdf data on a Geostationary grid, where
+ either of these keys is not present as a grid-mapping variable
+ property : Previously, loading any such data caused an exception.
+
+* The area weights used when performing area weighted regridding with
+ :class:`iris.analysis.AreaWeighted` are now cached. This allows a
+ significant speedup when regridding multiple similar cubes, by repeatedly
+ using a :func:`iris.analysis.AreaWeighted.regridder` object
which you created first.
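+
+  A sketch of the reuse pattern (``src_cube``, ``grid_cube`` and
+  ``other_cube`` are hypothetical cubes)::
+
+      import iris.analysis
+
+      # Build the regridder once; the area weights are computed and cached.
+      regridder = iris.analysis.AreaWeighted().regridder(src_cube, grid_cube)
+      # Re-use it for further cubes defined on the same source grid.
+      result = regridder(other_cube)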
-* Name constraint matching against cubes during loading or extracting has been relaxed from strictly matching
- against the :meth:`~iris.cube.Cube.name`, to matching against either the
- ``standard_name``, ``long_name``, NetCDF ``var_name``, or ``STASH`` attributes metadata of a cube.
-* Cubes and coordinates now have a new ``names`` property that contains a tuple of the
- ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH`` attributes metadata.
-* The :class:`~iris.NameConstraint` provides richer name constraint matching when loading or extracting
- against cubes, by supporting a constraint against any combination of
- ``standard_name``, ``long_name``, NetCDF ``var_name`` and ``STASH``
- from the attributes dictionary of a :class:`~iris.cube.Cube`.
-
-
-Iris 2.4.0 Dependency Updates
-=============================
-* Iris is now able to use the latest version of matplotlib.
+* Name constraint matching against cubes during loading or extracting has been
+ relaxed from strictly matching against the :meth:`~iris.cube.Cube.name`, to
+ matching against either the ``standard_name``, ``long_name``, NetCDF
+ ``var_name``, or ``STASH`` attributes metadata of a cube.
+
+* Cubes and coordinates now have a new ``names`` property that contains a tuple
+ of the ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH``
+ attributes metadata.
-Bugs Fixed
+* The :class:`~iris.NameConstraint` provides richer name constraint matching
+ when loading or extracting against cubes, by supporting a constraint against
+ any combination of ``standard_name``, ``long_name``, NetCDF ``var_name`` and
+ ``STASH`` from the attributes dictionary of a :class:`~iris.cube.Cube`.
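+
+  For example (a sketch; the file and names used are illustrative)::
+
+      import iris
+
+      constraint = iris.NameConstraint(standard_name='air_temperature',
+                                       var_name='tas')
+      cubes = iris.load('my_data.nc', constraint)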
+
+
+Bugs fixed
==========
+
* Fixed a problem which was causing file loads to fetch *all* field data
whenever UM files (PP or Fieldsfiles) were loaded.
- With large sourcefiles, initial file loads are slow, with large memory usage
+ With large source files, initial file loads are slow, with large memory usage
before any cube data is even fetched. Large enough files will cause a crash.
The problem occurs only with Dask versions >= 2.0.
+
+Internal
+========
+
+* Iris is now able to use the latest version of matplotlib.
diff --git a/docs/iris/src/whatsnew/aggregate_directory.py b/docs/iris/src/whatsnew/aggregate_directory.py
deleted file mode 100644
index c7b497307f..0000000000
--- a/docs/iris/src/whatsnew/aggregate_directory.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the LGPL license.
-# See COPYING and COPYING.LESSER in the root of the repository for full
-# licensing details.
-"""
-Build a release file from files in a contributions directory.
-
-Looks for directories "<...whatsnew>/contributions_".
-Takes specified "xx.xx" as version, or latest found (alphabetic).
-Writes a file "<...whatsnew>/.rst".
-
-Valid contributions filenames are of the form:
- __summary.txt
-Where can be any valid chars, and
- is one of :
- "newfeature" "bugfix" "incompatiblechange" "deprecate" "docchange", and
- is in the style "2001-Jan-23".
-
-"""
-
-import datetime
-from glob import glob
-import os
-import re
-import argparse
-import warnings
-from operator import itemgetter
-from distutils import version
-
-# Regular expressions: CONTRIBUTION_REGEX matches the filenames of
-# contribution snippets. It is split into three sections separated by _
-# 0. String for the category. 1. ISO8601 date. 2. String for the feature name.
-# RELEASE_REGEX matches the directory names, returning the release.
-CONTRIBUTION_REGEX_STRING = r"(?P.*)"
-CONTRIBUTION_REGEX_STRING += r"_(?P\d{4}-\w{3}-\d{2})"
-CONTRIBUTION_REGEX_STRING += r"_(?P.*)\.txt$"
-CONTRIBUTION_REGEX = re.compile(CONTRIBUTION_REGEX_STRING)
-RELEASEDIR_PREFIX = r"contributions_"
-_RELEASEDIR_REGEX_STRING = RELEASEDIR_PREFIX + r"(?P.*)$"
-RELEASE_REGEX = re.compile(_RELEASEDIR_REGEX_STRING)
-SOFTWARE_NAME = "Iris"
-EXTENSION = ".rst"
-VALID_CATEGORIES = [
- {"Prefix": "newfeature", "Title": "Features"},
- {"Prefix": "bugfix", "Title": "Bugs Fixed"},
- {"Prefix": "incompatiblechange", "Title": "Incompatible Changes"},
- {"Prefix": "deprecate", "Title": "Deprecations"},
- {"Prefix": "docchange", "Title": "Documentation Changes"},
-]
-VALID_CATEGORY_PREFIXES = [cat["Prefix"] for cat in VALID_CATEGORIES]
-
-
-def _self_root_directory():
- return os.path.abspath(os.path.dirname(__file__))
-
-
-def _decode_contribution_filename(file_name):
- file_name_elements = CONTRIBUTION_REGEX.match(file_name)
- category = file_name_elements.group("category")
- if category not in VALID_CATEGORY_PREFIXES:
- # This is an error
- raise ValueError("Unknown category in contribution filename.")
- isodate = file_name_elements.group("isodate")
- date_of_item = datetime.datetime.strptime(isodate, "%Y-%b-%d").date()
- return category, isodate, date_of_item
-
-
-def is_release_directory(directory_name, release):
- """Returns True if a given directory name matches the requested release."""
- result = False
- directory_elements = RELEASE_REGEX.match(directory_name)
- try:
- release_string = directory_elements.group("release")
- directory_release = version.StrictVersion(release_string)
- except (AttributeError, ValueError):
- pass
- else:
- if directory_release == release:
- result = True
- return result
-
-
-def is_compiled_release(root_directory, release):
- """Returns True if the requested release.rst file exists."""
- result = False
- compiled_filename = "{!s}{}".format(release, EXTENSION)
- compiled_filepath = os.path.join(root_directory, compiled_filename)
- if os.path.exists(compiled_filepath) and os.path.isfile(compiled_filepath):
- result = True
- return result
-
-
-def get_latest_release(root_directory=None):
- """
- Implement default=latest release identification.
-
- Returns a valid release code.
-
- """
- if root_directory is None:
- root_directory = _self_root_directory()
- directory_contents = os.listdir(root_directory)
- # Default release to latest visible dir.
- possible_release_dirs = [
- releasedir_name
- for releasedir_name in directory_contents
- if RELEASE_REGEX.match(releasedir_name)
- ]
- if len(possible_release_dirs) == 0:
- dirspec = os.path.join(root_directory, RELEASEDIR_PREFIX + "*")
- msg = "No valid release directories found, i.e. {!r}."
- raise ValueError(msg.format(dirspec))
- release_dirname = sorted(possible_release_dirs)[-1]
- release = RELEASE_REGEX.match(release_dirname).group("release")
- return release
-
-
-def find_release_directory(
- root_directory, release=None, fail_on_existing=True
-):
- """
- Returns the matching contribution directory or raises an exception.
-
- Defaults to latest-found release (from release directory names).
- Optionally, fail if the matching release file already exists.
- *Always* fail if no release directory exists.
-
- """
- if release is None:
- # Default to latest release.
- release = get_latest_release(root_directory)
-
- if fail_on_existing:
- compiled_release = is_compiled_release(root_directory, release)
- if compiled_release:
- msg = (
- "Specified release {!r} is already compiled : "
- "{!r} already exists."
- )
- compiled_filename = str(release) + EXTENSION
- raise ValueError(msg.format(release, compiled_filename))
-
- directory_contents = os.listdir(root_directory)
- result = None
- for inode in directory_contents:
- node_path = os.path.join(root_directory, inode)
- if os.path.isdir(node_path):
- release_directory = is_release_directory(inode, release)
- if release_directory:
- result = os.path.join(root_directory, inode)
- break
- if not result:
- msg = "Contribution folder for release {!s} does not exist : no {!r}."
- release_dirname = RELEASEDIR_PREFIX + str(release) + "/"
- release_dirpath = os.path.join(root_directory, release_dirname)
- raise ValueError(msg.format(release, release_dirpath))
- return result
-
-
-def generate_header(release, unreleased=False):
- """Return a list of text lines that make up a header for the document."""
- if unreleased:
- isodatestamp = ""
- else:
- isodatestamp = datetime.date.today().strftime("%Y-%m-%d")
- header_text = []
- title_template = "What's New in {} {!s}\n"
- title_line = title_template.format(SOFTWARE_NAME, release)
- title_underline = ("*" * (len(title_line) - 1)) + "\n"
- header_text.append(title_line)
- header_text.append(title_underline)
- header_text.append("\n")
- header_text.append(":Release: {!s}\n".format(release))
- header_text.append(":Date: {}\n".format(isodatestamp))
- header_text.append("\n")
- description_template = (
- "This document explains the new/changed features "
- "of {} in version {!s}\n"
- )
- header_text.append(description_template.format(SOFTWARE_NAME, release))
- header_text.append("(:doc:`View all changes `.)")
- header_text.append("\n")
- return header_text
-
-
-def read_directory(directory_path):
- """Parse the items in a specified directory and return their metadata."""
- directory_contents = os.listdir(directory_path)
- compilable_files_unsorted = []
- misnamed_files = []
- for file_name in directory_contents:
- try:
- category, isodate, date_of_item = _decode_contribution_filename(
- file_name
- )
- except (AttributeError, ValueError):
- misnamed_files.append(file_name)
- continue
- compilable_files_unsorted.append(
- {"Category": category, "Date": date_of_item, "FileName": file_name}
- )
- compilable_files = sorted(
- compilable_files_unsorted, key=itemgetter("Date"), reverse=True
- )
- if misnamed_files:
- msg = "Found contribution file(s) with unexpected names :"
- for filename in misnamed_files:
- full_path = os.path.join(directory_path, filename)
- msg += "\n {}".format(full_path)
- warnings.warn(msg, UserWarning)
-
- return compilable_files
-
-
-def compile_directory(directory, release, unreleased=False):
- """Read in source files in date order and compile the text into a list."""
- if unreleased:
- release = ""
- source_text = read_directory(directory)
- compiled_text = []
- header_text = generate_header(release, unreleased)
- compiled_text.extend(header_text)
- for count, category in enumerate(VALID_CATEGORIES):
- category_text = []
- subtitle_line = ""
- if count == 0:
- subtitle_line += "{} {!s} ".format(SOFTWARE_NAME, release)
- subtitle_line += category["Title"] + "\n"
- subtitle_underline = ("=" * (len(subtitle_line) - 1)) + "\n"
- category_text.append("\n")
- category_text.append(subtitle_line)
- category_text.append(subtitle_underline)
- category_items = [
- item
- for item in source_text
- if item["Category"] == category["Prefix"]
- ]
- if not category_items:
- continue
- for file_description in category_items:
- entry_path = os.path.join(directory, file_description["FileName"])
- with open(entry_path, "r") as content_object:
- text = content_object.readlines()
- if not text[-1].endswith("\n"):
- text[-1] += "\n"
- category_text.extend(text)
- category_text.append("\n----\n\n")
- compiled_text.extend(category_text)
- return compiled_text
-
-
-def check_all_contributions_valid(release=None, quiet=False, unreleased=False):
- """"Scan the contributions directory for badly-named files."""
- root_directory = _self_root_directory()
- # Check there are *some* contributions directory(s), else silently pass.
- contribs_spec = os.path.join(root_directory, RELEASEDIR_PREFIX + "*")
- if len(glob(contribs_spec)) > 0:
- # There are some contributions directories: check latest / specified.
- if release is None:
- release = get_latest_release()
- if not quiet:
- msg = 'Checking whatsnew contributions for release "{!s}".'
- print(msg.format(release))
- release_directory = find_release_directory(
- root_directory, release, fail_on_existing=False
- )
- # Run the directory scan, but convert any warning into an error.
- with warnings.catch_warnings():
- warnings.simplefilter("error")
- compile_directory(release_directory, release, unreleased)
- if not quiet:
- print("done.")
-
-
-def run_compilation(release=None, quiet=False, unreleased=False):
- """Write a draft release.rst file given a specified uncompiled release."""
- if release is None:
- # This must exist !
- release = get_latest_release()
- if not quiet:
- msg = 'Building release document for release "{!s}".'
- print(msg.format(release))
- root_directory = _self_root_directory()
- release_directory = find_release_directory(root_directory, release)
- compiled_text = compile_directory(release_directory, release, unreleased)
- if unreleased:
- compiled_filename = "latest" + EXTENSION
- else:
- compiled_filename = str(release) + EXTENSION
- compiled_filepath = os.path.join(root_directory, compiled_filename)
- with open(compiled_filepath, "w") as output_object:
- for string_line in compiled_text:
- output_object.write(string_line)
- if not quiet:
- print("done.")
-
-
-if __name__ == "__main__":
- PARSER = argparse.ArgumentParser()
- PARSER.add_argument(
- "release",
- help="Release number to be compiled",
- nargs="?",
- type=version.StrictVersion,
- )
- PARSER.add_argument(
- "-c",
- "--checkonly",
- action="store_true",
- help="Check contribution file names, do not build.",
- )
- PARSER.add_argument(
- "-u",
- "--unreleased",
- action="store_true",
- help=(
- "Label the release version as '', "
- "and its date as ''."
- ),
- )
- PARSER.add_argument(
- "-q",
- "--quiet",
- action="store_true",
- help="Do not print progress messages.",
- )
- ARGUMENTS = PARSER.parse_args()
- release = ARGUMENTS.release
- unreleased = ARGUMENTS.unreleased
- quiet = ARGUMENTS.quiet
- if ARGUMENTS.checkonly:
- check_all_contributions_valid(
- release, quiet=quiet, unreleased=unreleased
- )
- else:
- run_compilation(release, quiet=quiet, unreleased=unreleased)
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Dec-02_cell_measure_concatenate.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Dec-02_cell_measure_concatenate.txt
deleted file mode 100644
index 151341d9af..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Dec-02_cell_measure_concatenate.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-* Concatenating cubes along an axis shared by cell measures would cause concatenation to inappropriately fail.
- These cell measures are now concatenated together in the resulting cube.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-14_cell_measure_positional_argument.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-14_cell_measure_positional_argument.txt
deleted file mode 100644
index d43b5c2d44..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-14_cell_measure_positional_argument.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-* A :class:`iris.coords.CellMeasure` requires a string ``measure`` attribute to be defined, which can only have a value
- of ``area`` or ``volume``. Previously, the ``measure`` was provided as a keyword argument to
- :class:`~iris.coords.CellMeasure` with an default value of ``None``, which caused a ``TypeError`` when no
- ``measure`` was provided. The default value of ``area`` is now used.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-19_cell_measure_copy_loss.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-19_cell_measure_copy_loss.txt
deleted file mode 100644
index 3a0bbfaf56..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-19_cell_measure_copy_loss.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-* Copying a cube would previously ignore any attached class:`iris.coords.CellMeasure`.
- These are now copied over.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2020-Feb-13_cube_iter_remove.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2020-Feb-13_cube_iter_remove.txt
deleted file mode 100644
index 082cd8acc8..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2020-Feb-13_cube_iter_remove.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-* The `__iter__()` method in class:`iris.cube.Cube` was set to `None`.
- `TypeError` is still raised if a `Cube` is iterated over but
- `isinstance(cube, collections.Iterable)` now behaves as expected.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-11_remove_LBProc_flag_attributes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-11_remove_LBProc_flag_attributes.txt
deleted file mode 100644
index 56c1435316..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-11_remove_LBProc_flag_attributes.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-* :attr:`iris.fileformats.pp.PPField.lbproc` is now an `int`. The
- deprecated attributes `flag1`, `flag2` etc. have been removed from it.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-14_remove_deprecated_future_flags.txt b/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-14_remove_deprecated_future_flags.txt
deleted file mode 100644
index 3bf515187b..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/deprecate_2019-Oct-14_remove_deprecated_future_flags.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-* The deprecated :class:`iris.Future` flags `cell_date_time_objects`,
- `netcdf_promote`, `netcdf_no_unlimited` and `clip_latitudes` have
- been removed.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/docchange_2019-Dec-04_black_code_formatting.txt b/docs/iris/src/whatsnew/contributions_3.0.0/docchange_2019-Dec-04_black_code_formatting.txt
deleted file mode 100644
index 500a215bb9..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/docchange_2019-Dec-04_black_code_formatting.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-* Added support for the `black `_ code formatter.
- This is now automatically checked on GitHub PRs, replacing the older, unittest-based
- "iris.tests.test_coding_standards.TestCodeFormat".
- Black provides automatic code format correction for most IDEs.
- See the new developer guide section on this :
- https://scitools-docs.github.io/iris/master/developers_guide/code_format.html.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-12_remove_experimental_concatenate_module.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-12_remove_experimental_concatenate_module.txt
deleted file mode 100644
index 418377aabc..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-12_remove_experimental_concatenate_module.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-* The :mod:`iris.experimental.concatenate` module has now been removed. In ``v1.6.0`` the experimental `concatenate`
- functionality was moved to the :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the
- :func:`iris.experimental.concatenate.concatenate` function raised an exception.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-13_move_experimental_equalise_cubes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-13_move_experimental_equalise_cubes.txt
deleted file mode 100644
index a7ddaa441b..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-13_move_experimental_equalise_cubes.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-* The :func:`iris.experimental.equalise_cubes.equalise_attributes` function has been moved from the
- :mod:`iris.experimental` module into the :mod:`iris.util` module. Please use the :func:`iris.util.equalise_attributes`
- function instead.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-26_remove_coord_comparison.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-26_remove_coord_comparison.txt
deleted file mode 100644
index a8ba4131d0..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2019-Nov-26_remove_coord_comparison.txt
+++ /dev/null
@@ -1 +0,0 @@
-* The former function "iris.analysis.coord_comparison" has been removed.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-15_change_default_unit_loading.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-15_change_default_unit_loading.txt
new file mode 100644
index 0000000000..be048990f3
--- /dev/null
+++ b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-15_change_default_unit_loading.txt
@@ -0,0 +1 @@
+* When loading data from netcdf-CF files, where a variable has no "units" property, the corresponding Iris object will have "units='unknown'". Prior to Iris 3.0, these cases defaulted to "units='1'".
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-22_cubelist_extract_cubes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-22_cubelist_extract_cubes.txt
deleted file mode 100644
index ed8e6a8e2c..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/incompatiblechange_2020-May-22_cubelist_extract_cubes.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-* The method :meth:`~iris.cube.CubeList.extract_strict`, and the 'strict'
- keyword to :meth:`~iris.cube.CubeList.extract` method have been removed, and
- are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` and
- :meth:`~iris.cube.CubeList.extract_cubes`.
- The new routines perform the same operation, but in a style more like other
- Iris functions such as :meth:`iris.load_cube` and :meth:`iris.load_cubes`.
- Unlike 'strict extraction', the type of return value is now completely
- consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a cube,
- and :meth:`~iris.cube.CubeList.extract_cubes` always returns a CubeList of a
- length equal to the number of constraints.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Dec-20_cache_area_weights.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Dec-20_cache_area_weights.txt
deleted file mode 100644
index 8c9b7b95d2..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Dec-20_cache_area_weights.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-* The area weights used when performing area weighted regridding with :class:`iris.analysis.AreaWeighted`
- are now cached.
- This allows a significant speedup when regridding multiple similar cubes, by repeatedly using
- a `'regridder' object <../iris/iris/analysis.html?highlight=regridder#iris.analysis.AreaWeighted.regridder>`_
- which you created first.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-27_cell_measure_statistics.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-27_cell_measure_statistics.txt
deleted file mode 100644
index cf8c83e594..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-27_cell_measure_statistics.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-* Statistical operations :meth:`iris.cube.Cube.collapsed`,
- :meth:`iris.cube.Cube.aggregated_by` and :meth:`iris.cube.Cube.rolling_window`
- previously removed every :class:`iris.coord.CellMeasure` attached to the cube.
- Now, a :class:`iris.coord.CellMeasure` will only be removed if it is associated
- with an axis over which the statistic is being run.
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-14_cf_ancillary_data.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-14_cf_ancillary_data.txt
deleted file mode 100644
index ea70702f38..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-14_cf_ancillary_data.txt
+++ /dev/null
@@ -1 +0,0 @@
-* CF Ancillary Variables are now supported in cubes.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_nameconstraint.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_nameconstraint.txt
deleted file mode 100644
index eeb40990e2..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_nameconstraint.txt
+++ /dev/null
@@ -1 +0,0 @@
-* The :class:`~iris.NameConstraint` provides richer name constraint matching when loading or extracting against cubes, by supporting a constraint against any combination of ``standard_name``, ``long_name``, NetCDF ``var_name`` and ``STASH`` from the attributes dictionary of a :class:`~iris.cube.Cube`.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_names_property.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_names_property.txt
deleted file mode 100644
index a092631152..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_names_property.txt
+++ /dev/null
@@ -1 +0,0 @@
-* Cubes and coordinates now have a new ``names`` property that contains a tuple of the ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH`` attributes metadata.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_relaxed_name_loading.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_relaxed_name_loading.txt
deleted file mode 100644
index 6773ac28b1..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-15_relaxed_name_loading.txt
+++ /dev/null
@@ -1 +0,0 @@
-* Name constraint matching against cubes during loading or extracting has been relaxed from strictly matching against the :meth:`~iris.cube.Cube.name`, to matching against either the ``standard_name``, ``long_name``, NetCDF ``var_name``, or ``STASH`` attributes metadata of a cube.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-17_unpin_mpl.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-17_unpin_mpl.txt
deleted file mode 100644
index bbee87037a..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Oct-17_unpin_mpl.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-* Supporting Iris for both Python2 and Python3 resulted in pinning our dependency on matplotlib at v2.x.
- Now that Python2 support has been dropped, Iris is free to use the latest version of matplotlib.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-06_relax_geostationary.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-06_relax_geostationary.txt
deleted file mode 100644
index e1113c838c..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-06_relax_geostationary.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-* :class:`iris.coord_systems.Geostationary` can now accept creation arguments of
- `false_easting=None` or `false_northing=None`, equivalent to values of 0.
- Previously these kwargs could be omitted, but could not be set to `None`.
- This also enables loading netcdf data on a Geostationary grid, where either of these
- keys is not present as a grid-mapping variable property : Previously, loading any
- such data caused an exception.
diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-31_nimrod_format_enhancement.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-31_nimrod_format_enhancement.txt
deleted file mode 100644
index 454fc3617f..0000000000
--- a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2020-Jan-31_nimrod_format_enhancement.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-* The :class:`~iris.fileformats.nimrod` provides richer meta-data translation
-when loading Nimrod-format data into cubes. This covers most known operational
-use-cases.
diff --git a/docs/iris/src/whatsnew/index.rst b/docs/iris/src/whatsnew/index.rst
index 03834a43a7..a574e7a689 100644
--- a/docs/iris/src/whatsnew/index.rst
+++ b/docs/iris/src/whatsnew/index.rst
@@ -6,11 +6,11 @@ What's new in Iris
These "What's new" pages describe the important changes between major
Iris versions.
+
.. toctree::
- :maxdepth: 2
+ :maxdepth: 1
latest.rst
- 3.0.rst
2.4.rst
2.3.rst
2.2.rst
diff --git a/docs/iris/src/whatsnew/latest.rst b/docs/iris/src/whatsnew/latest.rst
new file mode 100644
index 0000000000..a32aca6d5f
--- /dev/null
+++ b/docs/iris/src/whatsnew/latest.rst
@@ -0,0 +1,126 @@
+
+************
+
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
+
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
+* The :mod:`~iris.fileformats.nimrod` module provides richer meta-data translation
+ when loading ``Nimrod`` data into cubes. This covers most known
+ operational use-cases.
+
+* Statistical operations :meth:`iris.cube.Cube.collapsed`,
+ :meth:`iris.cube.Cube.aggregated_by` and :meth:`iris.cube.Cube.rolling_window`
+ previously removed every :class:`iris.coord.CellMeasure` attached to the
+ cube. Now, a :class:`iris.coord.CellMeasure` will only be removed if it is
+ associated with an axis over which the statistic is being run.
+
+* Supporting ``Iris`` for both ``Python2`` and ``Python3`` resulted in pinning our
+ dependency on `matplotlib`_ at ``v2.x``. Now that ``Python2`` support has
+ been dropped, ``Iris`` is free to use the latest version of `matplotlib`_.
+
+* `CF Ancillary Data`_ variables are now supported.
+
+
+Bugs Fixed
+==========
+
+* The method :meth:`~iris.cube.Cube.remove_coord` would fail to remove derived
+ coordinates; it now removes them by removing the corresponding aux_factories.
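+
+  For example (a sketch; ``altitude`` stands for a typical derived
+  coordinate)::
+
+      cube.remove_coord('altitude')  # also removes the backing aux factory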
+
+* The ``__iter__()`` method in :class:`~iris.cube.Cube` was set to ``None``.
+ ``TypeError`` is still raised if a :class:`~iris.cube.Cube` is iterated over
+ but ``isinstance(cube, collections.Iterable)`` now behaves as expected.
+
+* Concatenating cubes along an axis shared by cell measures would cause
+ concatenation to inappropriately fail. These cell measures are now
+ concatenated together in the resulting cube.
+
+* Copying a cube would previously ignore any attached
+ :class:`~iris.coords.CellMeasure`. These are now copied over.
+
+* A :class:`~iris.coords.CellMeasure` requires a string ``measure`` attribute
+ to be defined, which can only have a value of ``area`` or ``volume``.
+ Previously, the ``measure`` was provided as a keyword argument to
+ :class:`~iris.coords.CellMeasure` with a default value of ``None``, which
+ caused a ``TypeError`` when no ``measure`` was provided. The default value
+ of ``area`` is now used.
+
+
+Incompatible Changes
+====================
+
+* The method :meth:`~iris.cube.CubeList.extract_strict`, and the ``strict``
+ keyword to :meth:`~iris.cube.CubeList.extract` method have been removed, and
+ are replaced by the new routines :meth:`~iris.cube.CubeList.extract_cube` and
+ :meth:`~iris.cube.CubeList.extract_cubes`.
+ The new routines perform the same operation, but in a style more like other
+ ``Iris`` functions such as :meth:`~iris.load_cube` and :meth:`~iris.load_cubes`.
+ Unlike ``strict`` extraction, the type of return value is now completely
+ consistent : :meth:`~iris.cube.CubeList.extract_cube` always returns a
+ :class:`~iris.cube.Cube`, and :meth:`~iris.cube.CubeList.extract_cubes`
+ always returns an :class:`iris.cube.CubeList` of a length equal to the
+ number of constraints.
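+
+  A sketch of the replacement pattern (``cubes`` is a hypothetical
+  :class:`~iris.cube.CubeList`)::
+
+      # Previously: cube = cubes.extract_strict('air_temperature')
+      cube = cubes.extract_cube('air_temperature')
+      # Always a CubeList with exactly one cube per constraint:
+      pair = cubes.extract_cubes(['air_temperature', 'air_pressure'])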
+
+* The former function ``iris.analysis.coord_comparison`` has been removed.
+
+* The :func:`iris.experimental.equalise_cubes.equalise_attributes` function
+ has been moved from the :mod:`iris.experimental` module into the
+ :mod:`iris.util` module. Please use the :func:`iris.util.equalise_attributes`
+ function instead.
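+
+  For example (a sketch, assuming ``cubes`` is a list of cubes whose
+  attributes differ)::
+
+      from iris.util import equalise_attributes
+
+      # Strip any attributes that are not identical across all the cubes.
+      equalise_attributes(cubes)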
+
+* The :mod:`iris.experimental.concatenate` module has now been removed. In
+ ``v1.6.0`` the experimental ``concatenate`` functionality was moved to the
+ :meth:`iris.cube.CubeList.concatenate` method. Since then, calling the
+ :func:`iris.experimental.concatenate.concatenate` function raised an
+ exception.
+
+
+Deprecations
+============
+
+* The deprecated :class:`iris.Future` flags ``cell_date_time_objects``,
+ ``netcdf_promote``, ``netcdf_no_unlimited`` and ``clip_latitudes`` have
+ been removed.
+
+* :attr:`iris.fileformats.pp.PPField.lbproc` is now an ``int``. The
+ deprecated attributes ``flag1``, ``flag2`` etc. have been removed from it.
+
+
+Documentation
+=============
+
+* Moved the :ref:`sphx_glr_generated_gallery_oceanography_plot_orca_projection.py`
+ from the general part of the gallery to oceanography.
+
+* Updated documentation to use a modern sphinx theme and be served from
+ https://scitools-iris.readthedocs.io/en/latest/.
+
+* Added support for the `black `_ code
+ formatter. This is now automatically checked on GitHub PRs, replacing the
+ older, unittest-based "iris.tests.test_coding_standards.TestCodeFormat".
+ Black provides automatic code format correction for most IDEs. See the new
+ developer guide section on :ref:`iris_code_format`.
+
+* Refreshed the :ref:`whats_new_contributions` for the :ref:`iris_whatsnew`.
+ This includes always creating the ``latest`` what's new page so it appears
+ on the latest documentation at
+ https://scitools-iris.readthedocs.io/en/latest/whatsnew. This resolves
+ :issue:`2104` and :issue:`3451`. Also updated the
+ :ref:`iris_development_releases_steps` to follow when making a release.
+
+* Enabled the PDF creation of the documentation on the `Read the Docs`_ service.
+ The PDF may be accessed by clicking on the version at the bottom of the side
+ bar, then selecting ``PDF`` from the ``Downloads`` section.
+
+.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/
+.. _matplotlib: https://matplotlib.org/
+.. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data
\ No newline at end of file
diff --git a/docs/iris/src/whatsnew/latest.rst.template b/docs/iris/src/whatsnew/latest.rst.template
new file mode 100644
index 0000000000..82f87d9e5a
--- /dev/null
+++ b/docs/iris/src/whatsnew/latest.rst.template
@@ -0,0 +1,46 @@
+
+************
+
+This document explains the changes made to Iris for this release
+(:doc:`View all changes `.)
+
+
+.. contents:: Skip to section:
+ :local:
+ :depth: 3
+
+
+Features
+========
+
+* N/A
+
+
+Bugs Fixed
+==========
+
+* N/A
+
+
+Incompatible Changes
+====================
+
+* N/A
+
+
+Dependencies
+============
+
+* N/A
+
+
+Internal
+========
+
+* N/A
+
+
+Documentation
+=============
+
+* N/A
\ No newline at end of file
diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py
index 646613d114..6bda3aa274 100644
--- a/lib/iris/_concatenate.py
+++ b/lib/iris/_concatenate.py
@@ -9,7 +9,6 @@
"""
from collections import defaultdict, namedtuple
-from copy import deepcopy
import dask.array as da
import numpy as np
@@ -69,7 +68,7 @@ class _CoordMetaData(
Args:
* defn:
- The :class:`iris.coords.CoordDefn` metadata that represents a
+ The :class:`iris.common.CoordMetadata` metadata that represents a
coordinate.
* dims:
@@ -86,7 +85,7 @@ class _CoordMetaData(
"""
- def __new__(cls, coord, dims):
+ def __new__(mcs, coord, dims):
"""
Create a new :class:`_CoordMetaData` instance.
@@ -102,7 +101,7 @@ def __new__(cls, coord, dims):
The new class instance.
"""
- defn = coord._as_defn()
+ defn = coord.metadata
points_dtype = coord.points.dtype
bounds_dtype = coord.bounds.dtype if coord.bounds is not None else None
kwargs = {}
@@ -121,7 +120,7 @@ def __new__(cls, coord, dims):
order = _DECREASING
kwargs["order"] = order
metadata = super().__new__(
- cls, defn, dims, points_dtype, bounds_dtype, kwargs
+ mcs, defn, dims, points_dtype, bounds_dtype, kwargs
)
return metadata
@@ -195,7 +194,7 @@ def __new__(cls, ancil, dims):
The new class instance.
"""
- defn = ancil._as_defn()
+ defn = ancil.metadata
metadata = super().__new__(cls, defn, dims)
return metadata
@@ -404,11 +403,11 @@ def __init__(self, cube):
axes = dict(T=0, Z=1, Y=2, X=3)
# Coordinate sort function - by guessed coordinate axis, then
- # by coordinate definition, then by dimensions, in ascending order.
+ # by coordinate name, then by dimensions, in ascending order.
def key_func(coord):
return (
axes.get(guess_coord_axis(coord), len(axes) + 1),
- coord._as_defn(),
+ coord.name(),
cube.coord_dims(coord),
)
@@ -423,7 +422,7 @@ def key_func(coord):
self.scalar_coords.append(coord)
def meta_key_func(dm):
- return (dm._as_defn(), dm.cube_dims(cube))
+ return (dm.metadata, dm.cube_dims(cube))
for cm in sorted(cube.cell_measures(), key=meta_key_func):
dims = cube.cell_measure_dims(cm)
@@ -991,6 +990,9 @@ def _build_aux_coordinates(self):
points, bounds=bnds, **kwargs
)
except ValueError:
+ # Make sure to remove the "circular" kwarg, which may be
+ # present in the defn of a DimCoord being demoted.
+ _ = kwargs.pop("circular", None)
coord = iris.coords.AuxCoord(
points, bounds=bnds, **kwargs
)
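A minimal, self-contained sketch of the fallback above: non-monotonic points
force the ``ValueError`` path, and the ``DimCoord``-only ``circular`` kwarg
must be dropped before building the ``AuxCoord``::

    import iris.coords

    points = [0.0, 240.0, 120.0]  # non-monotonic, so DimCoord rejects it
    kwargs = {"standard_name": "longitude", "units": "degrees", "circular": True}
    try:
        coord = iris.coords.DimCoord(points, **kwargs)
    except ValueError:
        # AuxCoord does not accept "circular", so remove it first.
        kwargs.pop("circular", None)
        coord = iris.coords.AuxCoord(points, **kwargs)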
diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py
index 37daeec4aa..0f6a8ab6c6 100644
--- a/lib/iris/_constraints.py
+++ b/lib/iris/_constraints.py
@@ -131,7 +131,7 @@ def _coordless_match(self, cube):
if self._name:
# Require to also check against cube.name() for the fallback
# "unknown" default case, when there is no name metadata available.
- match = self._name in cube.names or self._name == cube.name()
+ match = self._name in cube._names or self._name == cube.name()
if match and self._cube_func:
match = self._cube_func(cube)
return match
@@ -515,6 +515,7 @@ def __init__(
match.
Kwargs:
+
* standard_name:
A string or callable representing the standard name to match
against.
@@ -534,6 +535,7 @@ def __init__(
where the standard_name is not set, then use standard_name=None.
Returns:
+
* Boolean
Example usage::
@@ -544,8 +546,8 @@ def __init__(
iris.NameConstraint(standard_name='air_temperature',
STASH=lambda stash: stash.item == 203)
-
"""
+
self.standard_name = standard_name
self.long_name = long_name
self.var_name = var_name
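A hedged usage sketch of the ``NameConstraint`` docstring example above
(``filename`` is a hypothetical path)::

    import iris

    constraint = iris.NameConstraint(
        standard_name="air_temperature",
        STASH=lambda stash: stash.item == 203,
    )
    filename = "my_data.pp"  # hypothetical
    cubes = iris.load(filename, constraint)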
diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py
index 9ea07e54b2..ed6dd784f2 100644
--- a/lib/iris/_merge.py
+++ b/lib/iris/_merge.py
@@ -22,8 +22,9 @@
is_lazy_data,
multidim_lazy_stack,
)
-import iris.cube
import iris.coords
+from iris.common import CoordMetadata, CubeMetadata
+import iris.cube
import iris.exceptions
import iris.util
@@ -115,7 +116,7 @@ class _ScalarCoordPayload(
Args:
* defns:
- A list of scalar coordinate definitions :class:`iris.coords.CoordDefn`
+ A list of scalar coordinate metadata :class:`iris.common.CoordMetadata`
belonging to a :class:`iris.cube.Cube`.
* values:
@@ -1478,9 +1479,7 @@ def axis_and_name(name):
)
else:
bounds = None
- kwargs = dict(
- zip(iris.coords.CoordDefn._fields, defns[name])
- )
+ kwargs = dict(zip(CoordMetadata._fields, defns[name]))
kwargs.update(metadata[name].kwargs)
def name_in_independents():
@@ -1560,7 +1559,7 @@ def name_in_independents():
if bounds is not None:
bounds[index] = name_value.bound
- kwargs = dict(zip(iris.coords.CoordDefn._fields, defns[name]))
+ kwargs = dict(zip(CoordMetadata._fields, defns[name]))
self._aux_templates.append(
_Template(dims, points, bounds, kwargs)
)
@@ -1594,7 +1593,7 @@ def _get_cube(self, data):
(deepcopy(coord), dims)
for coord, dims in self._aux_coords_and_dims
]
- kwargs = dict(zip(iris.cube.CubeMetadata._fields, signature.defn))
+ kwargs = dict(zip(CubeMetadata._fields, signature.defn))
cms_and_dims = [
(deepcopy(cm), dims) for cm, dims in self._cell_measures_and_dims
@@ -1794,7 +1793,7 @@ def _extract_coord_payload(self, cube):
# Coordinate sort function.
# NB. This makes use of two properties which don't end up in
- # the CoordDefn used by scalar_defns: `coord.points.dtype` and
+ # the metadata used by scalar_defns: `coord.points.dtype` and
# `type(coord)`.
def key_func(coord):
points_dtype = coord.dtype
@@ -1805,14 +1804,14 @@ def key_func(coord):
axis_dict.get(
iris.util.guess_coord_axis(coord), len(axis_dict) + 1
),
- coord._as_defn(),
+ coord.metadata,
)
# Order the coordinates by hints, axis, and definition.
for coord in sorted(coords, key=key_func):
if not cube.coord_dims(coord) and coord.shape == (1,):
# Extract the scalar coordinate data and metadata.
- scalar_defns.append(coord._as_defn())
+ scalar_defns.append(coord.metadata)
# Because we know there's a single Cell in the
# coordinate, it's quicker to roll our own than use
# Coord.cell().
@@ -1844,14 +1843,14 @@ def key_func(coord):
factory_defns = []
for factory in sorted(
- cube.aux_factories, key=lambda factory: factory._as_defn()
+ cube.aux_factories, key=lambda factory: factory.metadata
):
dependency_defns = []
dependencies = factory.dependencies
for key in sorted(dependencies):
coord = dependencies[key]
if coord is not None:
- dependency_defns.append((key, coord._as_defn()))
+ dependency_defns.append((key, coord.metadata))
factory_defn = _FactoryDefn(type(factory), dependency_defns)
factory_defns.append(factory_defn)
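The switch to ``CoordMetadata`` above relies on the metadata namedtuple fields
lining up with the coordinate constructor keywords; a small sketch of that
round-trip (field names assumed to mirror the old ``CoordDefn``)::

    from iris.common import CoordMetadata
    from iris.coords import AuxCoord

    coord = AuxCoord([1.0, 2.0], long_name="example", units="K")
    defn = coord.metadata                            # a CoordMetadata namedtuple
    kwargs = dict(zip(CoordMetadata._fields, defn))  # standard_name, units, ...
    clone = AuxCoord([3.0, 4.0], **kwargs)           # same metadata, new points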
diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py
index 5b7dff813d..a1e56533fd 100644
--- a/lib/iris/analysis/__init__.py
+++ b/lib/iris/analysis/__init__.py
@@ -27,11 +27,11 @@
The gallery contains several interesting worked examples of how an
:class:`~iris.analysis.Aggregator` may be used, including:
- * :ref:`Meteorology-COP_1d_plot`
- * :ref:`General-SOI_filtering`
- * :ref:`Meteorology-hovmoller`
- * :ref:`Meteorology-lagged_ensemble`
- * :ref:`General-custom_aggregation`
+ * :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_1d.py`
+ * :ref:`sphx_glr_generated_gallery_general_plot_SOI_filtering.py`
+ * :ref:`sphx_glr_generated_gallery_meteorology_plot_hovmoller.py`
+ * :ref:`sphx_glr_generated_gallery_meteorology_plot_lagged_ensemble.py`
+ * :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py`
"""
@@ -319,7 +319,7 @@ def _dimensional_metadata_comparison(*cubes, object_get=None):
eq = (
other_coord is coord
or other_coord.name() == coord.name()
- and other_coord._as_defn() == coord._as_defn()
+ and other_coord.metadata == coord.metadata
)
if eq:
coord_to_add_to_group = other_coord
@@ -487,7 +487,8 @@ def __init__(
A variety of ready-made aggregators are provided in this module, such
as :data:`~iris.analysis.MEAN` and :data:`~iris.analysis.MAX`. Custom
aggregators can also be created for special purposes, see
- :ref:`General-custom_aggregation` for a worked example.
+ :ref:`sphx_glr_generated_gallery_general_plot_custom_aggregation.py`
+ for a worked example.
"""
#: Cube cell method string.
@@ -604,7 +605,7 @@ def update_metadata(self, cube, coords, **kwargs):
Kwargs:
- * This function is intended to be used in conjuction with aggregate()
+ * This function is intended to be used in conjunction with aggregate()
and should be passed the same keywords (for example, the "ddof"
keyword for a standard deviation aggregator).
@@ -802,7 +803,9 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs):
# order cube.
for point in points:
cube = collapsed_cube.copy()
- coord = iris.coords.AuxCoord(point, long_name=coord_name)
+ coord = iris.coords.AuxCoord(
+ point, long_name=coord_name, units="percent"
+ )
cube.add_aux_coord(coord)
cubes.append(cube)
@@ -980,7 +983,7 @@ def update_metadata(self, cube, coords, **kwargs):
Kwargs:
- * This function is intended to be used in conjuction with aggregate()
+ * This function is intended to be used in conjunction with aggregate()
and should be passed the same keywords (for example, the "ddof"
keyword for a standard deviation aggregator).
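With the ``units="percent"`` change above, the point coordinate added per
percentile slice now carries explicit units; a small sketch (the coordinate
name ``percentile_over_time`` is assumed from the collapsed dimension)::

    import numpy as np

    import iris.coords
    import iris.cube
    from iris.analysis import PERCENTILE

    cube = iris.cube.Cube(np.arange(12.0).reshape(3, 4))
    cube.add_dim_coord(iris.coords.DimCoord([0, 1, 2], long_name="time"), 0)

    result = cube.collapsed("time", PERCENTILE, percent=[25, 50, 75])
    print(result.coord("percentile_over_time").units)  # percent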
diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py
index 06f44dc951..7ff5430ca6 100644
--- a/lib/iris/analysis/_area_weighted.py
+++ b/lib/iris/analysis/_area_weighted.py
@@ -4,10 +4,7 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
-import numpy as np
-
from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid
-import iris
import iris.experimental.regrid as eregrid
diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py
index c7f084bc1b..261c93e8ef 100644
--- a/lib/iris/analysis/_grid_angles.py
+++ b/lib/iris/analysis/_grid_angles.py
@@ -147,11 +147,11 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"):
connected by wraparound.
Input can be either two arrays, two coordinates, or a single cube
- containing two suitable coordinates identified with the 'x' and'y' axes.
+ containing two suitable coordinates identified with the 'x' and 'y' axes.
Args:
- The inputs (x [,y]) can be any of the folliwing :
+ The inputs (x [,y]) can be any of the following :
* x (:class:`~iris.cube.Cube`):
a grid cube with 2D X and Y coordinates, identified by 'axis'.
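A hypothetical single-cube call matching the docstring above (``grid_cube``
must carry 2D X and Y coordinates identified by axis)::

    from iris.analysis._grid_angles import gridcell_angles

    angles = gridcell_angles(grid_cube)  # grid_cube is hypothetical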
diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py
index 0670c073ae..71584f04c0 100644
--- a/lib/iris/analysis/_regrid.py
+++ b/lib/iris/analysis/_regrid.py
@@ -426,8 +426,7 @@ def _get_horizontal_coord(cube, axis):
if len(coords) != 1:
raise ValueError(
"Cube {!r} must contain a single 1D {} "
- "coordinate.".format(cube.name()),
- axis,
+ "coordinate.".format(cube.name(), axis)
)
return coords[0]
diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py
index 0de97b02f3..3a38b3b283 100644
--- a/lib/iris/analysis/maths.py
+++ b/lib/iris/analysis/maths.py
@@ -10,22 +10,27 @@
from functools import lru_cache
import inspect
+import logging
import math
import operator
import warnings
import cf_units
+import dask.array as da
import numpy as np
from numpy import ma
import iris.analysis
+from iris.common import SERVICES, Resolve
+from iris.common.lenient import _lenient_client
import iris.coords
import iris.cube
import iris.exceptions
import iris.util
-import dask.array as da
-from dask.array.core import broadcast_shapes
+
+# Configure the logger.
+logger = logging.getLogger(__name__)
@lru_cache(maxsize=128, typed=True)
@@ -115,7 +120,9 @@ def abs(cube, in_place=False):
_assert_is_cube(cube)
new_dtype = _output_dtype(np.abs, cube.dtype, in_place=in_place)
op = da.absolute if cube.has_lazy_data() else np.abs
- return _math_op_common(cube, op, cube.units, new_dtype, in_place=in_place)
+ return _math_op_common(
+ cube, op, cube.units, new_dtype=new_dtype, in_place=in_place
+ )
def intersection_of_cubes(cube, other_cube):
@@ -179,43 +186,7 @@ def _assert_is_cube(cube):
)
-def _assert_compatible(cube, other):
- """
- Checks to see if cube.data and another array can be broadcast to
- the same shape.
-
- """
- try:
- new_shape = broadcast_shapes(cube.shape, other.shape)
- except ValueError as err:
- # re-raise
- raise ValueError(
- "The array was not broadcastable to the cube's data "
- "shape. The error message when "
- "broadcasting:\n{}\nThe cube's shape was {} and the "
- "array's shape was {}".format(err, cube.shape, other.shape)
- )
-
- if cube.shape != new_shape:
- raise ValueError(
- "The array operation would increase the size or "
- "dimensionality of the cube. The new cube's data "
- "would have had to become: {}".format(new_shape)
- )
-
-
-def _assert_matching_units(cube, other, operation_name):
- """
- Check that the units of the cube and the other item are the same, or if
- the other does not have a unit, skip this test
- """
- if cube.units != getattr(other, "units", cube.units):
- msg = "Cannot use {!r} with differing units ({} & {})".format(
- operation_name, cube.units, other.units
- )
- raise iris.exceptions.NotYetImplementedError(msg)
-
-
+@_lenient_client(services=SERVICES)
def add(cube, other, dim=None, in_place=False):
"""
Calculate the sum of two cubes, or the sum of a cube and a
@@ -249,7 +220,10 @@ def add(cube, other, dim=None, in_place=False):
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(
- operator.add, cube.dtype, _get_dtype(other), in_place=in_place
+ operator.add,
+ cube.dtype,
+ second_dtype=_get_dtype(other),
+ in_place=in_place,
)
if in_place:
_inplace_common_checks(cube, other, "addition")
@@ -261,6 +235,7 @@ def add(cube, other, dim=None, in_place=False):
)
+@_lenient_client(services=SERVICES)
def subtract(cube, other, dim=None, in_place=False):
"""
Calculate the difference between two cubes, or the difference between
@@ -294,7 +269,10 @@ def subtract(cube, other, dim=None, in_place=False):
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(
- operator.sub, cube.dtype, _get_dtype(other), in_place=in_place
+ operator.sub,
+ cube.dtype,
+ second_dtype=_get_dtype(other),
+ in_place=in_place,
)
if in_place:
_inplace_common_checks(cube, other, "subtraction")
@@ -335,30 +313,15 @@ def _add_subtract_common(
"""
_assert_is_cube(cube)
- _assert_matching_units(cube, other, operation_name)
-
- if isinstance(other, iris.cube.Cube):
- # get a coordinate comparison of this cube and the cube to do the
- # operation with
- coord_comp = iris.analysis._dimensional_metadata_comparison(
- cube, other
- )
- bad_coord_grps = (
- coord_comp["ungroupable_and_dimensioned"]
- + coord_comp["resamplable"]
+ if cube.units != getattr(other, "units", cube.units):
+ emsg = (
+ f"Cannot use {operation_name!r} with differing units "
+ f"({cube.units} & {other.units})"
)
- if bad_coord_grps:
- raise ValueError(
- "This operation cannot be performed as there are "
- "differing coordinates (%s) remaining "
- "which cannot be ignored."
- % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps})
- )
- else:
- coord_comp = None
+ raise iris.exceptions.NotYetImplementedError(emsg)
- new_cube = _binary_op_common(
+ result = _binary_op_common(
operation_function,
operation_name,
cube,
@@ -369,17 +332,10 @@ def _add_subtract_common(
in_place=in_place,
)
- if coord_comp:
- # If a coordinate is to be ignored - remove it
- ignore = filter(
- None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]]
- )
- for coord in ignore:
- new_cube.remove_coord(coord)
-
- return new_cube
+ return result
+@_lenient_client(services=SERVICES)
def multiply(cube, other, dim=None, in_place=False):
"""
Calculate the product of a cube and another cube or coordinate.
@@ -403,38 +359,23 @@ def multiply(cube, other, dim=None, in_place=False):
"""
_assert_is_cube(cube)
+
new_dtype = _output_dtype(
- operator.mul, cube.dtype, _get_dtype(other), in_place=in_place
+ operator.mul,
+ cube.dtype,
+ second_dtype=_get_dtype(other),
+ in_place=in_place,
)
other_unit = getattr(other, "units", "1")
new_unit = cube.units * other_unit
+
if in_place:
_inplace_common_checks(cube, other, "multiplication")
op = operator.imul
else:
op = operator.mul
- if isinstance(other, iris.cube.Cube):
- # get a coordinate comparison of this cube and the cube to do the
- # operation with
- coord_comp = iris.analysis._dimensional_metadata_comparison(
- cube, other
- )
- bad_coord_grps = (
- coord_comp["ungroupable_and_dimensioned"]
- + coord_comp["resamplable"]
- )
- if bad_coord_grps:
- raise ValueError(
- "This operation cannot be performed as there are "
- "differing coordinates (%s) remaining "
- "which cannot be ignored."
- % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps})
- )
- else:
- coord_comp = None
-
- new_cube = _binary_op_common(
+ result = _binary_op_common(
op,
"multiply",
cube,
@@ -445,15 +386,7 @@ def multiply(cube, other, dim=None, in_place=False):
in_place=in_place,
)
- if coord_comp:
- # If a coordinate is to be ignored - remove it
- ignore = filter(
- None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]]
- )
- for coord in ignore:
- new_cube.remove_coord(coord)
-
- return new_cube
+ return result
def _inplace_common_checks(cube, other, math_op):
@@ -475,6 +408,7 @@ def _inplace_common_checks(cube, other, math_op):
)
+@_lenient_client(services=SERVICES)
def divide(cube, other, dim=None, in_place=False):
"""
Calculate the division of a cube by a cube or coordinate.
@@ -498,44 +432,29 @@ def divide(cube, other, dim=None, in_place=False):
"""
_assert_is_cube(cube)
+
new_dtype = _output_dtype(
- operator.truediv, cube.dtype, _get_dtype(other), in_place=in_place
+ operator.truediv,
+ cube.dtype,
+ second_dtype=_get_dtype(other),
+ in_place=in_place,
)
other_unit = getattr(other, "units", "1")
new_unit = cube.units / other_unit
+
if in_place:
if cube.dtype.kind in "iu":
# Cannot coerce float result from inplace division back to int.
- aemsg = (
- "Cannot perform inplace division of cube {!r} "
+ emsg = (
+ f"Cannot perform inplace division of cube {cube.name()!r} "
"with integer data."
)
- raise ArithmeticError(aemsg)
+ raise ArithmeticError(emsg)
op = operator.itruediv
else:
op = operator.truediv
- if isinstance(other, iris.cube.Cube):
- # get a coordinate comparison of this cube and the cube to do the
- # operation with
- coord_comp = iris.analysis._dimensional_metadata_comparison(
- cube, other
- )
- bad_coord_grps = (
- coord_comp["ungroupable_and_dimensioned"]
- + coord_comp["resamplable"]
- )
- if bad_coord_grps:
- raise ValueError(
- "This operation cannot be performed as there are "
- "differing coordinates (%s) remaining "
- "which cannot be ignored."
- % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps})
- )
- else:
- coord_comp = None
-
- new_cube = _binary_op_common(
+ result = _binary_op_common(
op,
"divide",
cube,
@@ -546,15 +465,7 @@ def divide(cube, other, dim=None, in_place=False):
in_place=in_place,
)
- if coord_comp:
- # If a coordinate is to be ignored - remove it
- ignore = filter(
- None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]]
- )
- for coord in ignore:
- new_cube.remove_coord(coord)
-
- return new_cube
+ return result
def exponentiate(cube, exponent, in_place=False):
@@ -585,7 +496,10 @@ def exponentiate(cube, exponent, in_place=False):
"""
_assert_is_cube(cube)
new_dtype = _output_dtype(
- operator.pow, cube.dtype, _get_dtype(exponent), in_place=in_place
+ operator.pow,
+ cube.dtype,
+ second_dtype=_get_dtype(exponent),
+ in_place=in_place,
)
if cube.has_lazy_data():
@@ -598,7 +512,11 @@ def power(data, out=None):
return np.power(data, exponent, out)
return _math_op_common(
- cube, power, cube.units ** exponent, new_dtype, in_place=in_place
+ cube,
+ power,
+ cube.units ** exponent,
+ new_dtype=new_dtype,
+ in_place=in_place,
)
@@ -628,7 +546,7 @@ def exp(cube, in_place=False):
new_dtype = _output_dtype(np.exp, cube.dtype, in_place=in_place)
op = da.exp if cube.has_lazy_data() else np.exp
return _math_op_common(
- cube, op, cf_units.Unit("1"), new_dtype, in_place=in_place
+ cube, op, cf_units.Unit("1"), new_dtype=new_dtype, in_place=in_place
)
@@ -654,7 +572,11 @@ def log(cube, in_place=False):
new_dtype = _output_dtype(np.log, cube.dtype, in_place=in_place)
op = da.log if cube.has_lazy_data() else np.log
return _math_op_common(
- cube, op, cube.units.log(math.e), new_dtype, in_place=in_place
+ cube,
+ op,
+ cube.units.log(math.e),
+ new_dtype=new_dtype,
+ in_place=in_place,
)
@@ -680,7 +602,7 @@ def log2(cube, in_place=False):
new_dtype = _output_dtype(np.log2, cube.dtype, in_place=in_place)
op = da.log2 if cube.has_lazy_data() else np.log2
return _math_op_common(
- cube, op, cube.units.log(2), new_dtype, in_place=in_place
+ cube, op, cube.units.log(2), new_dtype=new_dtype, in_place=in_place
)
@@ -706,12 +628,12 @@ def log10(cube, in_place=False):
new_dtype = _output_dtype(np.log10, cube.dtype, in_place=in_place)
op = da.log10 if cube.has_lazy_data() else np.log10
return _math_op_common(
- cube, op, cube.units.log(10), new_dtype, in_place=in_place
+ cube, op, cube.units.log(10), new_dtype=new_dtype, in_place=in_place
)
def apply_ufunc(
- ufunc, cube, other_cube=None, new_unit=None, new_name=None, in_place=False
+ ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False
):
"""
Apply a `numpy universal function
@@ -735,7 +657,7 @@ def apply_ufunc(
Kwargs:
- * other_cube:
+ * other:
An instance of :class:`iris.cube.Cube` to be given as the second
argument to :func:`numpy.ufunc`.
@@ -758,51 +680,59 @@ def apply_ufunc(
"""
if not isinstance(ufunc, np.ufunc):
- name = getattr(ufunc, "__name__", "function passed to apply_ufunc")
-
- raise TypeError(
- "{} is not recognised (it is not an instance of "
- "numpy.ufunc)".format(name)
+ ufunc_name = getattr(
+ ufunc, "__name__", "function passed to apply_ufunc"
)
+ emsg = f"{ufunc_name} is not recognised, it is not an instance of numpy.ufunc"
+ raise TypeError(emsg)
+
+ ufunc_name = ufunc.__name__
if ufunc.nout != 1:
- raise ValueError(
- "{} returns {} objects, apply_ufunc currently "
- "only supports ufunc functions returning a single "
- "object.".format(ufunc.__name__, ufunc.nout)
+ emsg = (
+ f"{ufunc_name} returns {ufunc.nout} objects, apply_ufunc currently "
+ "only supports numpy.ufunc functions returning a single object."
)
+ raise ValueError(emsg)
- if ufunc.nin == 2:
- if other_cube is None:
- raise ValueError(
- "{} requires two arguments, so other_cube "
- "must also be passed to apply_ufunc".format(ufunc.__name__)
+ if ufunc.nin == 1:
+ if other is not None:
+ dmsg = (
+ "ignoring surplus 'other' argument to apply_ufunc, "
+ f"provided ufunc {ufunc_name!r} only requires 1 input"
)
+ logger.debug(dmsg)
- _assert_is_cube(other_cube)
+ new_dtype = _output_dtype(ufunc, cube.dtype, in_place=in_place)
+
+ new_cube = _math_op_common(
+ cube, ufunc, new_unit, new_dtype=new_dtype, in_place=in_place
+ )
+ elif ufunc.nin == 2:
+ if other is None:
+ emsg = (
+ f"{ufunc_name} requires two arguments, another cube "
+ "must also be passed to apply_ufunc."
+ )
+ raise ValueError(emsg)
+
+ _assert_is_cube(other)
new_dtype = _output_dtype(
- ufunc, cube.dtype, other_cube.dtype, in_place=in_place
+ ufunc, cube.dtype, second_dtype=other.dtype, in_place=in_place
)
new_cube = _binary_op_common(
ufunc,
- ufunc.__name__,
+ ufunc_name,
cube,
- other_cube,
+ other,
new_unit,
new_dtype=new_dtype,
in_place=in_place,
)
-
- elif ufunc.nin == 1:
- new_dtype = _output_dtype(ufunc, cube.dtype, in_place=in_place)
-
- new_cube = _math_op_common(
- cube, ufunc, new_unit, new_dtype, in_place=in_place
- )
-
else:
- raise ValueError(ufunc.__name__ + ".nin should be 1 or 2.")
+ emsg = f"Provided ufunc '{ufunc_name}.nin' must be 1 or 2."
+ raise ValueError(emsg)
new_cube.rename(new_name)
@@ -838,39 +768,63 @@ def _binary_op_common(
`cube` and `cube.data`
"""
_assert_is_cube(cube)
+
+ # Flag to notify the _math_op_common function to simply wrap the resultant
+ # data of the maths operation in a cube with no metadata.
+ skeleton_cube = False
+
if isinstance(other, iris.coords.Coord):
- other = _broadcast_cube_coord_data(cube, other, operation_name, dim)
+ # The rhs must be an array.
+ rhs = _broadcast_cube_coord_data(cube, other, operation_name, dim=dim)
elif isinstance(other, iris.cube.Cube):
- try:
- broadcast_shapes(cube.shape, other.shape)
- except ValueError:
- other = iris.util.as_compatible_shape(other, cube)
- other = other.core_data()
- else:
- other = np.asanyarray(other)
+ # Prepare to resolve the cube operands and associated coordinate
+ # metadata into the resultant cube.
+ resolver = Resolve(cube, other)
+
+ # Get the broadcast, auto-transposed safe versions of the cube operands.
+ cube = resolver.lhs_cube_resolved
+ other = resolver.rhs_cube_resolved
- # don't worry about checking for other data types (such as scalars or
- # np.ndarrays) because _assert_compatible validates that they are broadcast
- # compatible with cube.data
- _assert_compatible(cube, other)
+ # Flag that it's safe to wrap the resultant data of the math operation
+ # in a cube with no metadata, as all of the metadata of the resultant
+ # cube is being managed by the resolver.
+ skeleton_cube = True
- def unary_func(x):
- ret = operation_function(x, other)
- if ret is NotImplemented:
- # explicitly raise the TypeError, so it gets raised even if, for
+ # The rhs must be an array.
+ rhs = other.core_data()
+ else:
+ # The rhs must be an array.
+ rhs = np.asanyarray(other)
+
+ def unary_func(lhs):
+ data = operation_function(lhs, rhs)
+ if data is NotImplemented:
+ # Explicitly raise the TypeError, so it gets raised even if, for
# example, `iris.analysis.maths.multiply(cube, other)` is called
- # directly instead of `cube * other`
- raise TypeError(
- "cannot %s %r and %r objects"
- % (
- operation_function.__name__,
- type(x).__name__,
- type(other).__name__,
- )
+ # directly instead of `cube * other`.
+ emsg = (
+ f"Cannot {operation_function.__name__} {type(lhs).__name__!r} "
+ f"and {type(rhs).__name__} objects."
)
- return ret
+ raise TypeError(emsg)
+ return data
+
+ result = _math_op_common(
+ cube,
+ unary_func,
+ new_unit,
+ new_dtype=new_dtype,
+ in_place=in_place,
+ skeleton_cube=skeleton_cube,
+ )
- return _math_op_common(cube, unary_func, new_unit, new_dtype, in_place)
+ if isinstance(other, iris.cube.Cube):
+ # Insert the resultant data from the maths operation
+ # within the resolved cube.
+ result = resolver.cube(result.core_data(), in_place=in_place)
+ _sanitise_metadata(result, new_unit)
+
+ return result
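A condensed sketch of the resolver flow above, assuming two broadcast-compatible
cubes ``cube_a`` and ``cube_b``::

    from iris.common import Resolve

    resolver = Resolve(cube_a, cube_b)            # pair and validate the operands
    lhs = resolver.lhs_cube_resolved.core_data()  # broadcast/transpose-safe data
    rhs = resolver.rhs_cube_resolved.core_data()
    result = resolver.cube(lhs + rhs)             # resolved metadata, new data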
def _broadcast_cube_coord_data(cube, other, operation_name, dim=None):
@@ -915,26 +869,64 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None):
return points
+def _sanitise_metadata(cube, unit):
+ """
+ As part of the maths metadata contract, clear the necessary or
+ unsupported metadata from the resultant cube of the maths operation.
+
+ """
+ # Clear the cube names.
+ cube.rename(None)
+
+ # Clear the cube cell methods.
+ cube.cell_methods = None
+
+ # Clear the cell measures.
+ for cm in cube.cell_measures():
+ cube.remove_cell_measure(cm)
+
+ # Clear the ancillary variables.
+ for av in cube.ancillary_variables():
+ cube.remove_ancillary_variable(av)
+
+ # Clear the STASH attribute, if present.
+ if "STASH" in cube.attributes:
+ del cube.attributes["STASH"]
+
+ # Set the cube units.
+ cube.units = unit
+
+
def _math_op_common(
- cube, operation_function, new_unit, new_dtype=None, in_place=False
+ cube,
+ operation_function,
+ new_unit,
+ new_dtype=None,
+ in_place=False,
+ skeleton_cube=False,
):
_assert_is_cube(cube)
- if in_place:
- new_cube = cube
+ if in_place and not skeleton_cube:
if cube.has_lazy_data():
- new_cube.data = operation_function(cube.lazy_data())
+ cube.data = operation_function(cube.lazy_data())
else:
try:
operation_function(cube.data, out=cube.data)
except TypeError:
- # Non ufunc function
+ # Non-ufunc function
operation_function(cube.data)
+ new_cube = cube
else:
- new_cube = cube.copy(data=operation_function(cube.core_data()))
+ data = operation_function(cube.core_data())
+ if skeleton_cube:
+ # Simply wrap the resultant data in a cube, as no
+ # cube metadata is required by the caller.
+ new_cube = iris.cube.Cube(data)
+ else:
+ new_cube = cube.copy(data)
- # If the result of the operation is scalar and masked, we need to fix up
- # the dtype
+ # If the result of the operation is scalar and masked, we need to fix up the dtype.
if (
new_dtype is not None
and not new_cube.has_lazy_data()
@@ -943,8 +935,8 @@ def _math_op_common(
):
new_cube.data = ma.masked_array(0, 1, dtype=new_dtype)
- iris.analysis.clear_phenomenon_identity(new_cube)
- new_cube.units = new_unit
+ _sanitise_metadata(new_cube, new_unit)
+
return new_cube
@@ -965,12 +957,12 @@ def __init__(self, data_func, units_func):
are given as positional arguments. Should return another
data array, with the same shape as the first array.
- Can also have keyword arguments.
+ May also have keyword arguments.
* units_func:
- Function to calculate the unit of the resulting cube.
- Should take the cube(s) as input and return
+ Function to calculate the units of the resulting cube.
+ Should take the cube/s as input and return
an instance of :class:`cf_units.Unit`.
Returns:
@@ -1008,6 +1000,22 @@ def ws_units_func(u_cube, v_cube):
cs_cube = cs_ifunc(cube, axis=1)
"""
+ self._data_func_name = getattr(
+ data_func, "__name__", "data_func argument passed to IFunc"
+ )
+
+ if not callable(data_func):
+ emsg = f"{self._data_func_name} is not callable."
+ raise TypeError(emsg)
+
+ self._unit_func_name = getattr(
+ units_func, "__name__", "units_func argument passed to IFunc"
+ )
+
+ if not callable(units_func):
+ emsg = f"{self._unit_func_name} is not callable."
+ raise TypeError(emsg)
+
if hasattr(data_func, "nin"):
self.nin = data_func.nin
else:
@@ -1023,39 +1031,38 @@ def ws_units_func(u_cube, v_cube):
self.nin = len(args)
if self.nin not in [1, 2]:
- msg = (
- "{} requires {} input data arrays, the IFunc class "
- "currently only supports functions requiring 1 or two "
- "data arrays as input."
+ emsg = (
+ f"{self._data_func_name} requires {self.nin} input data "
+ "arrays, the IFunc class currently only supports functions "
+ "requiring 1 or 2 data arrays as input."
)
- raise ValueError(msg.format(data_func.__name__, self.nin))
+ raise ValueError(emsg)
if hasattr(data_func, "nout"):
if data_func.nout != 1:
- msg = (
- "{} returns {} objects, the IFunc class currently "
- "only supports functions returning a single object."
- )
- raise ValueError(
- msg.format(data_func.__name__, data_func.nout)
+ emsg = (
+ f"{self._data_func_name} returns {data_func.nout} objects, "
+ "the IFunc class currently only supports functions "
+ "returning a single object."
)
+ raise ValueError(emsg)
self.data_func = data_func
-
self.units_func = units_func
def __repr__(self):
- return "iris.analysis.maths.IFunc({}, {})".format(
- self.data_func.__name__, self.units_func.__name__
+ result = (
+ f"iris.analysis.maths.IFunc({self._data_func_name}, "
+ f"{self._unit_func_name})"
)
+ return result
def __str__(self):
- return (
- "IFunc constructed from the data function {} "
- "and the units function {}".format(
- self.data_func.__name__, self.units_func.__name__
- )
+ result = (
+ f"IFunc constructed from the data function {self._data_func_name} "
+ f"and the units function {self._unit_func_name}"
)
+ return result
def __call__(
self,
@@ -1105,11 +1112,27 @@ def wrap_data_func(*args, **kwargs):
return self.data_func(*args, **kwargs_combined)
- if self.nin == 2:
+ if self.nin == 1:
+ if other is not None:
+ dmsg = (
+ "ignoring surplus 'other' argument to IFunc.__call__, "
+ f"provided data_func {self._data_func_name!r} only requires "
+ "1 input"
+ )
+ logger.debug(dmsg)
+
+ new_unit = self.units_func(cube)
+
+ new_cube = _math_op_common(
+ cube, wrap_data_func, new_unit, in_place=in_place
+ )
+ else:
if other is None:
- raise ValueError(
- self.data_func.__name__ + " requires two arguments"
+ emsg = (
+ f"{self._data_func_name} requires two arguments, another "
+ "cube must also be passed to IFunc.__call__."
)
+ raise ValueError(emsg)
new_unit = self.units_func(cube, other)
@@ -1123,21 +1146,6 @@ def wrap_data_func(*args, **kwargs):
in_place=in_place,
)
- elif self.nin == 1:
- if other is not None:
- raise ValueError(
- self.data_func.__name__ + " requires one argument"
- )
-
- new_unit = self.units_func(cube)
-
- new_cube = _math_op_common(
- cube, wrap_data_func, new_unit, in_place=in_place
- )
-
- else:
- raise ValueError("self.nin should be 1 or 2.")
-
if new_name is not None:
new_cube.rename(new_name)
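With ``other_cube`` renamed to ``other``, the second operand is now passed by
that keyword; a short sketch with hypothetical cubes ``cube_a`` and ``cube_b``::

    import numpy as np

    from iris.analysis.maths import apply_ufunc

    # Unary ufunc: a surplus "other" is ignored (with a debug log message).
    sines = apply_ufunc(np.sin, cube_a, new_unit="1")

    # Binary ufunc: the second operand is the "other" keyword.
    totals = apply_ufunc(np.add, cube_a, other=cube_b, new_unit=cube_a.units)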
diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py
index ba3ed2504c..bb283a0e89 100644
--- a/lib/iris/analysis/stats.py
+++ b/lib/iris/analysis/stats.py
@@ -64,7 +64,7 @@ def pearsonr(
correlation at each time/altitude point.
Reference:
- http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation
+ https://en.wikipedia.org/wiki/Pearson_correlation_coefficient
This operation is non-lazy.
diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py
index 11148188fa..5b63ff53ed 100644
--- a/lib/iris/aux_factory.py
+++ b/lib/iris/aux_factory.py
@@ -14,7 +14,11 @@
import dask.array as da
import numpy as np
-from iris._cube_coord_common import CFVariableMixin
+from iris.common import (
+ CFVariableMixin,
+ CoordMetadata,
+ metadata_manager_factory,
+)
import iris.coords
@@ -33,14 +37,40 @@ class AuxCoordFactory(CFVariableMixin, metaclass=ABCMeta):
"""
def __init__(self):
+ # Configure the metadata manager.
+ if not hasattr(self, "_metadata_manager"):
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
+
#: Descriptive name of the coordinate made by the factory
self.long_name = None
#: netCDF variable name for the coordinate made by the factory
self.var_name = None
- #: Coordinate system (if any) of the coordinate made by the factory
self.coord_system = None
+ # See the climatological property getter.
+ self._metadata_manager.climatological = False
+
+ @property
+ def coord_system(self):
+ """
+ The coordinate-system (if any) of the coordinate made by the factory.
+
+ """
+ return self._metadata_manager.coord_system
+
+ @coord_system.setter
+ def coord_system(self, value):
+ self._metadata_manager.coord_system = value
+
+ @property
+ def climatological(self):
+ """
+ Always returns False, as a factory itself can never have points/bounds
+ and therefore can never be climatological by definition.
+
+ """
+ return self._metadata_manager.climatological
@property
@abstractmethod
@@ -51,20 +81,6 @@ def dependencies(self):
"""
- def _as_defn(self):
- defn = iris.coords.CoordDefn(
- self.standard_name,
- self.long_name,
- self.var_name,
- self.units,
- self.attributes,
- self.coord_system,
- # Slot for Coord 'climatological' property, which this
- # doesn't have.
- False,
- )
- return defn
-
@abstractmethod
def make_coord(self, coord_dims_func):
"""
@@ -372,6 +388,8 @@ def __init__(self, delta=None, sigma=None, orography=None):
The coordinate providing the `orog` term.
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
if delta and delta.nbounds not in (0, 2):
@@ -395,21 +413,24 @@ def __init__(self, delta=None, sigma=None, orography=None):
self.standard_name = "altitude"
if delta is None and orography is None:
- raise ValueError(
- "Unable to determine units: no delta or orography"
- " available."
+ emsg = (
+ "Unable to determine units: no delta or orography "
+ "available."
)
+ raise ValueError(emsg)
if delta and orography and delta.units != orography.units:
- raise ValueError(
- "Incompatible units: delta and orography must"
- " have the same units."
+ emsg = (
+ "Incompatible units: delta and orography must have "
+ "the same units."
)
+ raise ValueError(emsg)
self.units = (delta and delta.units) or orography.units
if not self.units.is_convertible("m"):
- raise ValueError(
- "Invalid units: delta and/or orography"
- " must be expressed in length units."
+ emsg = (
+ "Invalid units: delta and/or orography must be expressed "
+ "in length units."
)
+ raise ValueError(emsg)
self.attributes = {"positive": "up"}
@property
@@ -556,10 +577,13 @@ def __init__(self, delta=None, sigma=None, surface_air_pressure=None):
The coordinate providing the `ps` term.
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
# Check that provided coords meet necessary conditions.
self._check_dependencies(delta, sigma, surface_air_pressure)
+ self.units = (delta and delta.units) or surface_air_pressure.units
self.delta = delta
self.sigma = sigma
@@ -568,20 +592,12 @@ def __init__(self, delta=None, sigma=None, surface_air_pressure=None):
self.standard_name = "air_pressure"
self.attributes = {}
- @property
- def units(self):
- if self.delta is not None:
- units = self.delta.units
- else:
- units = self.surface_air_pressure.units
- return units
-
@staticmethod
def _check_dependencies(delta, sigma, surface_air_pressure):
# Check for sufficient coordinates.
if delta is None and (sigma is None or surface_air_pressure is None):
msg = (
- "Unable to contruct hybrid pressure coordinate factory "
+ "Unable to construct hybrid pressure coordinate factory "
"due to insufficient source coordinates."
)
raise ValueError(msg)
@@ -753,7 +769,7 @@ def __init__(
zlev=None,
):
"""
- Creates a ocean sigma over z coordinate factory with the formula:
+ Creates an ocean sigma over z coordinate factory with the formula:
if k < nsigma:
z(n, k, j, i) = eta(n, j, i) + sigma(k) *
@@ -766,10 +782,13 @@ def __init__(
either `eta`, or 'sigma' and `depth` and `depth_c` coordinates.
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev)
+ self.units = zlev.units
self.sigma = sigma
self.eta = eta
@@ -781,16 +800,12 @@ def __init__(
self.standard_name = "sea_surface_height_above_reference_ellipsoid"
self.attributes = {"positive": "up"}
- @property
- def units(self):
- return self.zlev.units
-
@staticmethod
def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev):
# Check for sufficient factory coordinates.
if zlev is None:
raise ValueError(
- "Unable to determine units: " "no zlev coordinate available."
+ "Unable to determine units: no zlev coordinate available."
)
if nsigma is None:
raise ValueError("Missing nsigma coordinate.")
@@ -957,7 +972,7 @@ def make_coord(self, coord_dims_func):
Args:
* coord_dims_func:
- A callable which can return the list of dimesions relevant
+ A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
@@ -1068,10 +1083,13 @@ def __init__(self, sigma=None, eta=None, depth=None):
(depth(j, i) + eta(n, j, i))
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(sigma, eta, depth)
+ self.units = depth.units
self.sigma = sigma
self.eta = eta
@@ -1080,10 +1098,6 @@ def __init__(self, sigma=None, eta=None, depth=None):
self.standard_name = "sea_surface_height_above_reference_ellipsoid"
self.attributes = {"positive": "up"}
- @property
- def units(self):
- return self.depth.units
-
@staticmethod
def _check_dependencies(sigma, eta, depth):
# Check for sufficient factory coordinates.
@@ -1252,10 +1266,13 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
S(k,j,i) = depth_c * s(k) + (depth(j,i) - depth_c) * C(k)
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, c, eta, depth, depth_c)
+ self.units = depth.units
self.s = s
self.c = c
@@ -1266,10 +1283,6 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
self.standard_name = "sea_surface_height_above_reference_ellipsoid"
self.attributes = {"positive": "up"}
- @property
- def units(self):
- return self.depth.units
-
@staticmethod
def _check_dependencies(s, c, eta, depth, depth_c):
# Check for sufficient factory coordinates.
@@ -1476,10 +1489,13 @@ def __init__(
b * [tanh(a * (s(k) + 0.5)) / (2 * tanh(0.5*a)) - 0.5]
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, eta, depth, a, b, depth_c)
+ self.units = depth.units
self.s = s
self.eta = eta
@@ -1491,10 +1507,6 @@ def __init__(
self.standard_name = "sea_surface_height_above_reference_ellipsoid"
self.attributes = {"positive": "up"}
- @property
- def units(self):
- return self.depth.units
-
@staticmethod
def _check_dependencies(s, eta, depth, a, b, depth_c):
# Check for sufficient factory coordinates.
@@ -1695,10 +1707,13 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
(depth_c + depth(j,i))
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
super().__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, c, eta, depth, depth_c)
+ self.units = depth.units
self.s = s
self.c = c
@@ -1709,10 +1724,6 @@ def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
self.standard_name = "sea_surface_height_above_reference_ellipsoid"
self.attributes = {"positive": "up"}
- @property
- def units(self):
- return self.depth.units
-
@staticmethod
def _check_dependencies(s, c, eta, depth, depth_c):
# Check for sufficient factory coordinates.
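A minimal sketch of the metadata-manager pattern the factories now follow
(``ExampleFactory`` is hypothetical)::

    from iris.common import CoordMetadata, metadata_manager_factory

    class ExampleFactory:
        def __init__(self):
            # Metadata now lives on a manager, not ad-hoc attributes.
            self._metadata_manager = metadata_manager_factory(CoordMetadata)
            self._metadata_manager.climatological = False

        @property
        def climatological(self):
            # A factory has no points/bounds, so never climatological.
            return self._metadata_manager.climatological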
diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py
new file mode 100644
index 0000000000..c540d81bc0
--- /dev/null
+++ b/lib/iris/common/__init__.py
@@ -0,0 +1,11 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+
+
+from .lenient import *
+from .metadata import *
+from .mixin import *
+from .resolve import *
diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py
new file mode 100644
index 0000000000..802d854554
--- /dev/null
+++ b/lib/iris/common/lenient.py
@@ -0,0 +1,661 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+
+from collections.abc import Iterable
+from contextlib import contextmanager
+from copy import deepcopy
+from functools import wraps
+from inspect import getmodule
+import threading
+
+
+__all__ = [
+ "LENIENT",
+ "Lenient",
+]
+
+
+#: Default _Lenient services global activation state.
+_LENIENT_ENABLE_DEFAULT = True
+
+#: Default Lenient maths feature state.
+_LENIENT_MATHS_DEFAULT = True
+
+#: Protected _Lenient internal non-client, non-service keys.
+_LENIENT_PROTECTED = ("active", "enable")
+
+
+def _lenient_client(*dargs, services=None):
+ """
+ Decorator that allows a client function/method to declare at runtime that
+ it is executing and requires lenient behaviour from a prior registered
+ lenient service function/method.
+
+ This decorator supports being called with no arguments e.g.,
+
+ @_lenient_client()
+ def func():
+ pass
+
+ This is equivalent to using it as a simple naked decorator e.g.,
+
+ @_lenient_client
+ def func():
+ pass
+
+ Alternatively, this decorator supports the lenient client explicitly
+ declaring the lenient services that it wishes to use e.g.,
+
+ @_lenient_client(services=(service1, service2, ...))
+ def func():
+ pass
+
+ Args:
+
+ * dargs (tuple of callable):
+ A tuple containing the callable lenient client function/method to be
+ wrapped by the decorator. This is automatically populated by Python
+ through the decorator interface. No argument needs to be provided
+ manually.
+
+ Kwargs:
+
+ * services (callable or str or iterable of callable/str)
+ Zero or more function/methods, or equivalent fully qualified string names, of
+ lenient service function/methods.
+
+ Returns:
+ Closure wrapped function/method.
+
+ """
+ ndargs = len(dargs)
+
+ if ndargs:
+ assert (
+ ndargs == 1
+ ), f"Invalid lenient client arguments, expecting 1 got {ndargs}."
+ assert callable(
+ dargs[0]
+ ), "Invalid lenient client argument, expecting a callable."
+
+ assert not (
+ ndargs and services
+ ), "Invalid lenient client, got both arguments and keyword arguments."
+
+ if ndargs:
+ # The decorator has been used as a simple naked decorator.
+ (func,) = dargs
+
+ @wraps(func)
+ def lenient_client_inner_naked(*args, **kwargs):
+ """
+ Closure wrapper function to register the wrapped function/method
+ as active at runtime before executing it.
+
+ """
+ with _LENIENT.context(active=_qualname(func)):
+ result = func(*args, **kwargs)
+ return result
+
+ result = lenient_client_inner_naked
+ else:
+ # The decorator has been called with explicit lenient services (possibly none).
+ if services is None:
+ services = ()
+
+ if isinstance(services, str) or not isinstance(services, Iterable):
+ services = (services,)
+
+ def lenient_client_outer(func):
+ @wraps(func)
+ def lenient_client_inner(*args, **kwargs):
+ """
+ Closure wrapper function to register the wrapped function/method
+ as active at runtime before executing it.
+
+ """
+ with _LENIENT.context(*services, active=_qualname(func)):
+ result = func(*args, **kwargs)
+ return result
+
+ return lenient_client_inner
+
+ result = lenient_client_outer
+
+ return result
+
+
+def _lenient_service(*dargs):
+ """
+ Decorator that allows a function/method to declare that it supports lenient
+ behaviour as a service.
+
+ Registration is at Python interpreter parse time.
+
+ The decorator supports being called with no arguments e.g.,
+
+ @_lenient_service()
+ def func():
+ pass
+
+ This is equivalent to using it as a simple naked decorator e.g.,
+
+ @_lenient_service
+ def func():
+ pass
+
+ Args:
+
+ * dargs (tuple of callable):
+ A tuple containing the callable lenient service function/method to be
+ wrapped by the decorator. This is automatically populated by Python
+ through the decorator interface. No argument needs to be provided
+ manually.
+
+ Returns:
+ Closure wrapped function/method.
+
+ """
+ ndargs = len(dargs)
+
+ if ndargs:
+ assert (
+ ndargs == 1
+ ), f"Invalid lenient service arguments, expecting 1 got {ndargs}."
+ assert callable(
+ dargs[0]
+ ), "Invalid lenient service argument, expecting a callable."
+
+ if ndargs:
+ # The decorator has been used as a simple naked decorator.
+ # Thus the (single) argument is a function to be wrapped.
+ # We just register the argument function as a lenient service, and
+ # return it unchanged
+ (func,) = dargs
+
+ _LENIENT.register_service(func)
+
+ # This decorator registers 'func': the func itself is unchanged.
+ result = func
+
+ else:
+ # The decorator has been called with no arguments.
+ # Return a decorator, to apply to 'func' immediately following.
+ def lenient_service_outer(func):
+ _LENIENT.register_service(func)
+
+ # Decorator registers 'func', but func itself is unchanged.
+ return func
+
+ result = lenient_service_outer
+
+ return result
+
+
+def _qualname(func):
+ """
+ Return the fully qualified function/method string name.
+
+ Args:
+
+ * func (callable):
+ Callable function/method. Non-callable arguments are simply
+ passed through.
+
+ .. note::
+ Inherited methods will be qualified with the base class that
+ defines the method.
+
+ """
+ result = func
+ if callable(func):
+ module = getmodule(func)
+ result = f"{module.__name__}.{func.__qualname__}"
+
+ return result
+
+
+class Lenient(threading.local):
+ def __init__(self, **kwargs):
+ """
+ A container for managing the run-time lenient features and options.
+
+ Kwargs:
+
+ * kwargs (dict)
+ Mapping of lenient key/value options to enable/disable. Note that,
+ only the lenient "maths" options is available, which controls
+ lenient/strict cube arithmetic.
+
+ For example::
+
+ Lenient(maths=False)
+
+ Note that the values of these options are thread-specific.
+
+ """
+ # Configure the initial default lenient state.
+ self._init()
+
+ if not kwargs:
+ # If not specified, set the default behaviour of the maths lenient feature.
+ kwargs = dict(maths=_LENIENT_MATHS_DEFAULT)
+
+ # Configure the provided (or default) lenient features.
+ for feature, state in kwargs.items():
+ self[feature] = state
+
+ def __contains__(self, key):
+ return key in self.__dict__
+
+ def __getitem__(self, key):
+ if key not in self.__dict__:
+ cls = self.__class__.__name__
+ emsg = f"Invalid {cls!r} option, got {key!r}."
+ raise KeyError(emsg)
+ return self.__dict__[key]
+
+ def __repr__(self):
+ cls = self.__class__.__name__
+ msg = f"{cls}(maths={self.__dict__['maths']!r})"
+ return msg
+
+ def __setitem__(self, key, value):
+ cls = self.__class__.__name__
+
+ if key not in self.__dict__:
+ emsg = f"Invalid {cls!r} option, got {key!r}."
+ raise KeyError(emsg)
+
+ if not isinstance(value, bool):
+ emsg = f"Invalid {cls!r} option {key!r} value, got {value!r}."
+ raise ValueError(emsg)
+
+ self.__dict__[key] = value
+ # Toggle the (private) lenient behaviour.
+ _LENIENT.enable = value
+
+ def _init(self):
+ """Configure the initial default lenient state."""
+ # This is the only publicly supported lenient feature, i.e. cube arithmetic.
+ self.__dict__["maths"] = None
+
+ @contextmanager
+ def context(self, **kwargs):
+ """
+ Return a context manager which allows temporary modification of the
+ lenient option state within the scope of the context manager.
+
+ On entry to the context manager, all provided keyword arguments are
+ applied. On exit from the context manager, the previous lenient
+ option state is restored.
+
+ For example::
+
+ with iris.common.LENIENT.context(maths=False):
+ pass
+
+ """
+
+ def configure_state(state):
+ for feature, value in state.items():
+ self[feature] = value
+
+ # Save the original state.
+ original_state = deepcopy(self.__dict__)
+
+ # Configure the provided lenient features.
+ configure_state(kwargs)
+
+ try:
+ yield
+ finally:
+ # Restore the original state.
+ self.__dict__.clear()
+ self._init()
+ configure_state(original_state)
+
+
+###############################################################################
+
+
+class _Lenient(threading.local):
+ def __init__(self, *args, **kwargs):
+ """
+ A container for managing the run-time lenient services and client
+ options for pre-defined functions/methods.
+
+ Args:
+
+ * args (callable or str or iterable of callable/str)
+ A function/method or fully qualified string name of the function/method
+ acting as a lenient service.
+
+ Kwargs:
+
+ * kwargs (dict of callable/str or iterable of callable/str)
+ Mapping of lenient client function/method, or fully qualified string name
+ of the function/method, to one or more lenient service
+ function/methods or fully qualified string name of function/methods.
+
+ For example::
+
+ _Lenient(service1, service2, client1=service1, client2=(service1, service2))
+
+ Note that the values of these options are thread-specific.
+
+ """
+ # The executing lenient client at runtime.
+ self.__dict__["active"] = None
+ # The global lenient services state activation switch.
+ self.__dict__["enable"] = _LENIENT_ENABLE_DEFAULT
+
+ for service in args:
+ self.register_service(service)
+
+ for client, services in kwargs.items():
+ self.register_client(client, services)
+
+ def __call__(self, func):
+ """
+ Determine whether it is valid for the function/method to provide a
+ lenient service at runtime to the actively executing lenient client.
+
+ Args:
+
+ * func (callable or str):
+ A function/method or fully qualified string name of the function/method.
+
+ Returns:
+ Boolean.
+
+ """
+ result = False
+ if self.__dict__["enable"]:
+ service = _qualname(func)
+ if service in self and self.__dict__[service]:
+ active = self.__dict__["active"]
+ if active is not None and active in self:
+ services = self.__dict__[active]
+ if isinstance(services, str) or not isinstance(
+ services, Iterable
+ ):
+ services = (services,)
+ result = service in services
+ return result
+
+ def __contains__(self, name):
+ name = _qualname(name)
+ return name in self.__dict__
+
+ def __getattr__(self, name):
+ if name not in self.__dict__:
+ cls = self.__class__.__name__
+ emsg = f"Invalid {cls!r} option, got {name!r}."
+ raise AttributeError(emsg)
+ return self.__dict__[name]
+
+ def __getitem__(self, name):
+ name = _qualname(name)
+ if name not in self.__dict__:
+ cls = self.__class__.__name__
+ emsg = f"Invalid {cls!r} option, got {name!r}."
+ raise KeyError(emsg)
+ return self.__dict__[name]
+
+ def __repr__(self):
+ cls = self.__class__.__name__
+ width = len(cls) + 1
+ kwargs = [
+ "{}={!r}".format(name, self.__dict__[name])
+ for name in sorted(self.__dict__.keys())
+ ]
+ joiner = ",\n{}".format(" " * width)
+ return "{}({})".format(cls, joiner.join(kwargs))
+
+ def __setitem__(self, name, value):
+ name = _qualname(name)
+ cls = self.__class__.__name__
+
+ if name not in self.__dict__:
+ emsg = f"Invalid {cls!r} option, got {name!r}."
+ raise KeyError(emsg)
+
+ if name == "active":
+ value = _qualname(value)
+ if not isinstance(value, str) and value is not None:
+ emsg = f"Invalid {cls!r} option {name!r}, expected a registered {cls!r} client, got {value!r}."
+ raise ValueError(emsg)
+ self.__dict__[name] = value
+ elif name == "enable":
+ self.enable = value
+ else:
+ if isinstance(value, str) or callable(value):
+ value = (value,)
+ if isinstance(value, Iterable):
+ value = tuple([_qualname(item) for item in value])
+ self.__dict__[name] = value
+
+ @contextmanager
+ def context(self, *args, **kwargs):
+ """
+ Return a context manager which allows temporary modification of
+ the lenient option state for the active thread.
+
+ On entry to the context manager, all provided keyword arguments are
+ applied. On exit from the context manager, the previous lenient option
+ state is restored.
+
+ For example::
+
+ with iris._LENIENT.context(example_lenient_flag=False):
+ # ... code that expects some non-lenient behaviour
+
+ .. note::
+ iris._LENIENT.example_lenient_flag does not exist and is
+ provided only as an example.
+
+ """
+
+ def update_client(client, services):
+ if client in self.__dict__:
+ existing_services = self.__dict__[client]
+ else:
+ existing_services = ()
+
+ self.__dict__[client] = tuple(set(existing_services + services))
+
+ # Save the original state.
+ original_state = deepcopy(self.__dict__)
+
+ # Temporarily update the state with the kwargs first.
+ for name, value in kwargs.items():
+ self[name] = value
+
+ # Get the active client.
+ active = self.__dict__["active"]
+
+ if args:
+ # Update the client with the provided services.
+ new_services = tuple([_qualname(arg) for arg in args])
+
+ if active is None:
+ # Take care not to use "context" as the ephemeral name
+ # of the context manager runtime "active" lenient client,
+ # as this causes a namespace clash with this method
+ # i.e., _Lenient.context, via _Lenient.__getattr__
+ active = "__context"
+ self.__dict__["active"] = active
+ self.__dict__[active] = new_services
+ else:
+ # Append provided services to any pre-existing services of the active client.
+ update_client(active, new_services)
+ else:
+ # Append previous ephemeral services (for non-specific client) to the active client.
+ if (
+ active is not None
+ and active != "__context"
+ and "__context" in self.__dict__
+ ):
+ new_services = self.__dict__["__context"]
+ update_client(active, new_services)
+
+ try:
+ yield
+ finally:
+ # Restore the original state.
+ self.__dict__.clear()
+ self.__dict__.update(original_state)
+
+ @property
+ def enable(self):
+ """Return the activation state of the lenient services."""
+ return self.__dict__["enable"]
+
+ @enable.setter
+ def enable(self, state):
+ """
+ Set the activation state of the lenient services.
+
+ Setting the state to `False` disables all lenient services, and
+ setting the state to `True` enables all lenient services.
+
+ Args:
+
+ * state (bool):
+ Activation state for the lenient services.
+
+ """
+ if not isinstance(state, bool):
+ cls = self.__class__.__name__
+ emsg = f"Invalid {cls!r} option 'enable', expected a {type(True)!r}, got {state!r}."
+ raise ValueError(emsg)
+ self.__dict__["enable"] = state
+
+ def register_client(self, func, services, append=False):
+ """
+ Add the provided mapping of lenient client function/method to
+ required lenient service function/methods.
+
+ Args:
+
+ * func (callable or str):
+ A client function/method or fully qualified string name of the
+ client function/method.
+
+ * services (callable or str or iterable of callable/str):
+ One or more service function/methods or fully qualified string names
+ of the required service function/method.
+
+ Kwargs:
+
+ * append (bool):
+ If True, append the lenient services to any pre-registered lenient
+ services for the provided lenient client. Default is False.
+
+ """
+ func = _qualname(func)
+ cls = self.__class__.__name__
+
+ if func in _LENIENT_PROTECTED:
+ emsg = (
+ f"Cannot register {cls!r} client. "
+ f"Please rename your client to be something other than {func!r}."
+ )
+ raise ValueError(emsg)
+ if isinstance(services, str) or not isinstance(services, Iterable):
+ services = (services,)
+ if not len(services):
+ emsg = f"Require at least one {cls!r} client service."
+ raise ValueError(emsg)
+ services = tuple([_qualname(service) for service in services])
+ if append:
+ # The original provided service order is not significant. There is
+ # no requirement to preserve it, so it's safe to sort.
+ existing = self.__dict__[func] if func in self else ()
+ services = tuple(sorted(set(existing) | set(services)))
+ self.__dict__[func] = services
+
+ def register_service(self, func):
+ """
+ Add the provided function/method as providing a lenient service and
+ activate it.
+
+ Args:
+
+ * func (callable or str):
+ A service function/method or fully qualified string name of the
+ service function/method.
+
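+        For example::
+
+            iris._LENIENT.register_service("my_module.service")
+
+        .. note::
+            The service name above does not exist and is provided only
+            as an example.
+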
+ """
+ func = _qualname(func)
+ if func in _LENIENT_PROTECTED:
+ cls = self.__class__.__name__
+ emsg = (
+ f"Cannot register {cls!r} service. "
+ f"Please rename your service to be something other than {func!r}."
+ )
+ raise ValueError(emsg)
+ self.__dict__[func] = True
+
+ def unregister_client(self, func):
+ """
+ Remove the provided function/method as a lenient client using lenient services.
+
+ Args:
+
+ * func (callable or str):
+            A function/method or fully qualified string name of the function/method.
+
+ """
+ func = _qualname(func)
+ cls = self.__class__.__name__
+
+ if func in _LENIENT_PROTECTED:
+ emsg = f"Cannot unregister {cls!r} client, as {func!r} is a protected {cls!r} option."
+ raise ValueError(emsg)
+
+ if func in self.__dict__:
+ value = self.__dict__[func]
+ if isinstance(value, bool):
+ emsg = f"Cannot unregister {cls!r} client, as {func!r} is not a valid {cls!r} client."
+ raise ValueError(emsg)
+ del self.__dict__[func]
+ else:
+ emsg = f"Cannot unregister unknown {cls!r} client {func!r}."
+ raise ValueError(emsg)
+
+ def unregister_service(self, func):
+ """
+ Remove the provided function/method as providing a lenient service.
+
+ Args:
+
+ * func (callable or str):
+ A function/method or fully qualified string name of the function/method.
+
+ """
+ func = _qualname(func)
+ cls = self.__class__.__name__
+
+ if func in _LENIENT_PROTECTED:
+ emsg = f"Cannot unregister {cls!r} service, as {func!r} is a protected {cls!r} option."
+ raise ValueError(emsg)
+
+ if func in self.__dict__:
+ value = self.__dict__[func]
+ if not isinstance(value, bool):
+ emsg = f"Cannot unregister {cls!r} service, as {func!r} is not a valid {cls!r} service."
+ raise ValueError(emsg)
+ del self.__dict__[func]
+ else:
+ emsg = f"Cannot unregister unknown {cls!r} service {func!r}."
+ raise ValueError(emsg)
+
+
+#: (Private) Instance that manages all Iris run-time lenient client and service options.
+_LENIENT = _Lenient()
+
+#: (Public) Instance that manages all Iris run-time lenient features.
+LENIENT = Lenient()
diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py
new file mode 100644
index 0000000000..af097ab4ec
--- /dev/null
+++ b/lib/iris/common/metadata.py
@@ -0,0 +1,1477 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+
+from abc import ABCMeta
+from collections import namedtuple
+from collections.abc import Iterable, Mapping
+from copy import deepcopy
+from functools import wraps
+import logging
+import re
+
+import numpy as np
+import numpy.ma as ma
+from xxhash import xxh64_hexdigest
+
+from .lenient import _LENIENT
+from .lenient import _lenient_service as lenient_service
+from .lenient import _qualname as qualname
+
+
+__all__ = [
+ "SERVICES_COMBINE",
+ "SERVICES_DIFFERENCE",
+ "SERVICES_EQUAL",
+ "SERVICES",
+ "AncillaryVariableMetadata",
+ "BaseMetadata",
+ "CellMeasureMetadata",
+ "CoordMetadata",
+ "CubeMetadata",
+ "DimCoordMetadata",
+ "metadata_manager_factory",
+]
+
+
+# https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name
+_TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""")
+
+# Configure the logger.
+logger = logging.getLogger(__name__)
+
+
+def _hexdigest(value):
+ """
+    Return a hexadecimal string hash representation of the provided value.
+
+ Calculates a 64-bit non-cryptographic hash of the provided value,
+ and returns the hexdigest string representation of the calculated hash.
+
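+    For example::
+
+        _hexdigest(np.arange(10))  # the hash folds in the array shape
+        _hexdigest(123)            # non-buffer values fold in their type
+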
+ """
+ # Special case: deal with numpy arrays.
+ if ma.isMaskedArray(value):
+ parts = (
+ value.shape,
+ xxh64_hexdigest(value.data),
+ xxh64_hexdigest(value.mask),
+ )
+ value = str(parts)
+ elif isinstance(value, np.ndarray):
+ parts = (value.shape, xxh64_hexdigest(value))
+ value = str(parts)
+
+ try:
+ # Calculate single-shot hash to avoid allocating state on the heap
+ result = xxh64_hexdigest(value)
+ except TypeError:
+ # xxhash expects a bytes-like object, so try hashing the
+ # string representation of the provided value instead, but
+ # also fold in the object type...
+ parts = (type(value), value)
+ result = xxh64_hexdigest(str(parts))
+
+ return result
+
+
+class _NamedTupleMeta(ABCMeta):
+ """
+ Meta-class to support the convenience of creating a namedtuple from
+ names/members of the metadata class hierarchy.
+
+ """
+
+ def __new__(mcs, name, bases, namespace):
+ names = []
+
+ for base in bases:
+ if hasattr(base, "_fields"):
+ base_names = getattr(base, "_fields")
+ is_abstract = getattr(
+ base_names, "__isabstractmethod__", False
+ )
+ if not is_abstract:
+ if (not isinstance(base_names, Iterable)) or isinstance(
+ base_names, str
+ ):
+ base_names = (base_names,)
+ names.extend(base_names)
+
+ if "_members" in namespace and not getattr(
+ namespace["_members"], "__isabstractmethod__", False
+ ):
+ namespace_names = namespace["_members"]
+
+ if (not isinstance(namespace_names, Iterable)) or isinstance(
+ namespace_names, str
+ ):
+ namespace_names = (namespace_names,)
+
+ names.extend(namespace_names)
+
+ if names:
+ item = namedtuple(f"{name}Namedtuple", names)
+ bases = list(bases)
+ # Influence the appropriate MRO.
+ bases.insert(0, item)
+ bases = tuple(bases)
+
+ return super().__new__(mcs, name, bases, namespace)
+
+
+class BaseMetadata(metaclass=_NamedTupleMeta):
+ """
+ Container for common metadata.
+
+ """
+
+ DEFAULT_NAME = "unknown" # the fall-back name for metadata identity
+
+ _members = (
+ "standard_name",
+ "long_name",
+ "var_name",
+ "units",
+ "attributes",
+ )
+
+ __slots__ = ()
+
+ @lenient_service
+ def __eq__(self, other):
+ """
+ Determine whether the associated metadata members are equivalent.
+
+ Args:
+
+ * other (metadata):
+ A metadata instance of the same type.
+
+ Returns:
+ Boolean.
+
+ """
+ result = NotImplemented
+ # Only perform equivalence with similar class instances.
+ if hasattr(other, "__class__") and other.__class__ is self.__class__:
+ if _LENIENT(self.__eq__) or _LENIENT(self.equal):
+ # Perform "lenient" equality.
+ logger.debug(
+ "lenient", extra=dict(cls=self.__class__.__name__)
+ )
+ result = self._compare_lenient(other)
+ else:
+ # Perform "strict" equality.
+ logger.debug("strict", extra=dict(cls=self.__class__.__name__))
+
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ if self._is_attributes(field, left, right):
+ result = self._compare_strict_attributes(left, right)
+ else:
+ result = left == right
+ return result
+
+            # Note that for strict equality we use "_fields", not "_members".
+ # The "circular" member does not participate in strict equivalence.
+ fields = filter(
+ lambda field: field != "circular", self._fields
+ )
+ result = all([func(field) for field in fields])
+
+ return result
+
+ def __lt__(self, other):
+ #
+ # Support Python2 behaviour for a "<" operation involving a
+ # "NoneType" operand.
+ #
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+
+ def _sort_key(item):
+ keys = []
+ for field in item._fields:
+ if field != "attributes":
+ value = getattr(item, field)
+ keys.extend((value is not None, value))
+ return tuple(keys)
+
+ return _sort_key(self) < _sort_key(other)
+
+ def __ne__(self, other):
+ result = self.__eq__(other)
+ if result is not NotImplemented:
+ result = not result
+
+ return result
+
+ def _api_common(
+ self, other, func_service, func_operation, action, lenient=None
+ ):
+ """
+ Common entry-point for lenient metadata API methods.
+
+ Args:
+
+ * other (metadata):
+ A metadata instance of the same type.
+
+ * func_service (callable):
+ The parent service method offering the API entry-point to the service.
+
+ * func_operation (callable):
+ The parent service method that provides the actual service.
+
+ * action (str):
+ The verb describing the service operation.
+
+ Kwargs:
+
+ * lenient (boolean):
+ Enable/disable the lenient service operation. The default is to automatically
+ detect whether this lenient service operation is enabled.
+
+ Returns:
+ The result of the service operation to the parent service caller.
+
+ """
+ # Ensure that we have similar class instances.
+ if (
+ not hasattr(other, "__class__")
+ or other.__class__ is not self.__class__
+ ):
+ emsg = "Cannot {} {!r} with {!r}."
+ raise TypeError(
+ emsg.format(action, self.__class__.__name__, type(other))
+ )
+
+ if lenient is None:
+ result = func_operation(other)
+ else:
+ if lenient:
+                # Use qualname to disassociate from the instance bound method.
+ args, kwargs = (qualname(func_service),), dict()
+ else:
+                # Use qualname to guarantee that the instance bound method
+                # is a hashable key.
+ args, kwargs = (), {qualname(func_service): False}
+
+ with _LENIENT.context(*args, **kwargs):
+ result = func_operation(other)
+
+ return result
+
+ def _combine(self, other):
+ """Perform associated metadata member combination."""
+ if _LENIENT(self.combine):
+ # Perform "lenient" combine.
+ logger.debug("lenient", extra=dict(cls=self.__class__.__name__))
+ values = self._combine_lenient(other)
+ else:
+ # Perform "strict" combine.
+ logger.debug("strict", extra=dict(cls=self.__class__.__name__))
+
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ if self._is_attributes(field, left, right):
+ result = self._combine_strict_attributes(left, right)
+ else:
+ result = left if left == right else None
+ return result
+
+            # Note that for strict combination we use "_fields", not "_members".
+ values = [func(field) for field in self._fields]
+
+ return values
+
+ def _combine_lenient(self, other):
+ """
+ Perform lenient combination of metadata members.
+
+ Args:
+
+ * other (BaseMetadata):
+ The other metadata participating in the lenient combination.
+
+ Returns:
+ A list of combined metadata member values.
+
+ """
+
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ result = None
+ if field == "units":
+ # Perform "strict" combination for "units".
+ result = left if left == right else None
+ elif self._is_attributes(field, left, right):
+ result = self._combine_lenient_attributes(left, right)
+ else:
+ if left == right:
+ result = left
+ elif left is None:
+ result = right
+ elif right is None:
+ result = left
+ return result
+
+        # Note that we use "_members", not "_fields".
+ return [func(field) for field in BaseMetadata._members]
+
+ @staticmethod
+ def _combine_lenient_attributes(left, right):
+ """Leniently combine the dictionary members together."""
+ # Copy the dictionaries.
+ left = deepcopy(left)
+ right = deepcopy(right)
+ # Use xxhash to perform an extremely fast non-cryptographic hash of
+ # each dictionary key rvalue, thus ensuring that the dictionary is
+ # completely hashable, as required by a set.
+ sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+ sright = {(k, _hexdigest(v)) for k, v in right.items()}
+ # Intersection of common items.
+ common = sleft & sright
+ # Items in sleft different from sright.
+ dsleft = dict(sleft - sright)
+ # Items in sright different from sleft.
+ dsright = dict(sright - sleft)
+ # Intersection of common item keys with different values.
+ keys = set(dsleft.keys()) & set(dsright.keys())
+ # Remove (in-place) common item keys with different values.
+ [dsleft.pop(key) for key in keys]
+ [dsright.pop(key) for key in keys]
+ # Now bring the result together.
+ result = {k: left[k] for k, _ in common}
+ result.update({k: left[k] for k in dsleft.keys()})
+ result.update({k: right[k] for k in dsright.keys()})
+
+ return result
+
+ @staticmethod
+ def _combine_strict_attributes(left, right):
+ """Perform strict combination of the dictionary members."""
+ # Copy the dictionaries.
+ left = deepcopy(left)
+ right = deepcopy(right)
+ # Use xxhash to perform an extremely fast non-cryptographic hash of
+ # each dictionary key rvalue, thus ensuring that the dictionary is
+ # completely hashable, as required by a set.
+ sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+ sright = {(k, _hexdigest(v)) for k, v in right.items()}
+ # Intersection of common items.
+ common = sleft & sright
+ # Now bring the result together.
+ result = {k: left[k] for k, _ in common}
+
+ return result
+
+ def _compare_lenient(self, other):
+ """
+ Perform lenient equality of metadata members.
+
+ Args:
+
+ * other (BaseMetadata):
+ The other metadata participating in the lenient comparison.
+
+ Returns:
+ Boolean.
+
+ """
+ result = False
+
+        # Use the "name" method to leniently compare "standard_name",
+        # "long_name", and "var_name" in a well-defined way.
+ if self.name() == other.name():
+
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ if field == "units":
+ # Perform "strict" compare for "units".
+ result = left == right
+ elif self._is_attributes(field, left, right):
+ result = self._compare_lenient_attributes(left, right)
+ else:
+ # Perform "lenient" compare for members.
+ result = (left == right) or left is None or right is None
+ return result
+
+            # Note that we use "_members", not "_fields".
+ # Lenient equality explicitly ignores the "var_name" member.
+ result = all(
+ [
+ func(field)
+ for field in BaseMetadata._members
+ if field != "var_name"
+ ]
+ )
+
+ return result
+
+ @staticmethod
+ def _compare_lenient_attributes(left, right):
+ """Perform lenient compare between the dictionary members."""
+ # Use xxhash to perform an extremely fast non-cryptographic hash of
+ # each dictionary key rvalue, thus ensuring that the dictionary is
+ # completely hashable, as required by a set.
+ sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+ sright = {(k, _hexdigest(v)) for k, v in right.items()}
+ # Items in sleft different from sright.
+ dsleft = dict(sleft - sright)
+ # Items in sright different from sleft.
+ dsright = dict(sright - sleft)
+ # Intersection of common item keys with different values.
+ keys = set(dsleft.keys()) & set(dsright.keys())
+
+ return not bool(keys)
+
+ @staticmethod
+ def _compare_strict_attributes(left, right):
+ """Perform strict compare between the dictionary members."""
+ # Use xxhash to perform an extremely fast non-cryptographic hash of
+ # each dictionary key rvalue, thus ensuring that the dictionary is
+ # completely hashable, as required by a set.
+ sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+ sright = {(k, _hexdigest(v)) for k, v in right.items()}
+
+ return sleft == sright
+
+ def _difference(self, other):
+ """Perform associated metadata member difference."""
+ if _LENIENT(self.difference):
+ # Perform "lenient" difference.
+ logger.debug("lenient", extra=dict(cls=self.__class__.__name__))
+ values = self._difference_lenient(other)
+ else:
+ # Perform "strict" difference.
+ logger.debug("strict", extra=dict(cls=self.__class__.__name__))
+
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ if self._is_attributes(field, left, right):
+ result = self._difference_strict_attributes(left, right)
+ else:
+ result = None if left == right else (left, right)
+ return result
+
+            # Note that for strict difference we use "_fields", not "_members".
+ values = [func(field) for field in self._fields]
+
+ return values
+
+ def _difference_lenient(self, other):
+ """
+ Perform lenient difference of metadata members.
+
+ Args:
+
+ * other (BaseMetadata):
+ The other metadata participating in the lenient difference.
+
+ Returns:
+ A list of difference metadata member values.
+
+ """
+
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ if field == "units":
+ # Perform "strict" difference for "units".
+ result = None if left == right else (left, right)
+ elif self._is_attributes(field, left, right):
+ result = self._difference_lenient_attributes(left, right)
+ else:
+ # Perform "lenient" difference for members.
+ result = (
+ (left, right)
+ if left is not None and right is not None and left != right
+ else None
+ )
+ return result
+
+        # Note that we use "_members", not "_fields".
+ return [func(field) for field in BaseMetadata._members]
+
+ @staticmethod
+ def _difference_lenient_attributes(left, right):
+ """Perform lenient difference between the dictionary members."""
+ # Use xxhash to perform an extremely fast non-cryptographic hash of
+ # each dictionary key rvalue, thus ensuring that the dictionary is
+ # completely hashable, as required by a set.
+ sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+ sright = {(k, _hexdigest(v)) for k, v in right.items()}
+ # Items in sleft different from sright.
+ dsleft = dict(sleft - sright)
+ # Items in sright different from sleft.
+ dsright = dict(sright - sleft)
+ # Intersection of common item keys with different values.
+ keys = set(dsleft.keys()) & set(dsright.keys())
+ # Keep (in-place) common item keys with different values.
+ [dsleft.pop(key) for key in list(dsleft.keys()) if key not in keys]
+ [dsright.pop(key) for key in list(dsright.keys()) if key not in keys]
+
+ if not bool(dsleft) and not bool(dsright):
+ result = None
+ else:
+ # Replace hash-rvalue with original rvalue.
+ dsleft = {k: left[k] for k in dsleft.keys()}
+ dsright = {k: right[k] for k in dsright.keys()}
+ result = (dsleft, dsright)
+
+ return result
+
+ @staticmethod
+ def _difference_strict_attributes(left, right):
+ """Perform strict difference between the dictionary members."""
+ # Use xxhash to perform an extremely fast non-cryptographic hash of
+ # each dictionary key rvalue, thus ensuring that the dictionary is
+ # completely hashable, as required by a set.
+ sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+ sright = {(k, _hexdigest(v)) for k, v in right.items()}
+ # Items in sleft different from sright.
+ dsleft = dict(sleft - sright)
+ # Items in sright different from sleft.
+ dsright = dict(sright - sleft)
+
+ if not bool(dsleft) and not bool(dsright):
+ result = None
+ else:
+ # Replace hash-rvalue with original rvalue.
+ dsleft = {k: left[k] for k in dsleft.keys()}
+ dsright = {k: right[k] for k in dsright.keys()}
+ result = (dsleft, dsright)
+
+ return result
+
+ @staticmethod
+ def _is_attributes(field, left, right):
+ """Determine whether we have two 'attributes' dictionaries."""
+ return (
+ field == "attributes"
+ and isinstance(left, Mapping)
+ and isinstance(right, Mapping)
+ )
+
+ @lenient_service
+ def combine(self, other, lenient=None):
+ """
+ Return a new metadata instance created by combining each of the
+ associated metadata members.
+
+ Args:
+
+ * other (metadata):
+ A metadata instance of the same type.
+
+ Kwargs:
+
+ * lenient (boolean):
+ Enable/disable lenient combination. The default is to automatically
+ detect whether this lenient operation is enabled.
+
+ Returns:
+ Metadata instance.
+
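+        For example::
+
+            metadata = cube1.metadata.combine(cube2.metadata)
+
+        .. note::
+            Here, ``cube1`` and ``cube2`` are simply illustrative
+            :class:`~iris.cube.Cube` instances.
+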
+ """
+ result = self._api_common(
+ other, self.combine, self._combine, "combine", lenient=lenient
+ )
+ return self.__class__(*result)
+
+ @lenient_service
+ def difference(self, other, lenient=None):
+ """
+ Return a new metadata instance created by performing a difference
+ comparison between each of the associated metadata members.
+
+ A metadata member returned with a value of "None" indicates that there
+ is no difference between the members being compared. Otherwise, a tuple
+ of the different values is returned.
+
+ Args:
+
+ * other (metadata):
+ A metadata instance of the same type.
+
+ Kwargs:
+
+ * lenient (boolean):
+ Enable/disable lenient difference. The default is to automatically
+ detect whether this lenient operation is enabled.
+
+ Returns:
+ Metadata instance of member differences or None.
+
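+        For example::
+
+            delta = cube1.metadata.difference(cube2.metadata)
+
+        .. note::
+            Here, ``cube1`` and ``cube2`` are simply illustrative
+            :class:`~iris.cube.Cube` instances.
+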
+ """
+ result = self._api_common(
+ other, self.difference, self._difference, "differ", lenient=lenient
+ )
+ result = (
+ None
+ if all([item is None for item in result])
+ else self.__class__(*result)
+ )
+ return result
+
+ @lenient_service
+ def equal(self, other, lenient=None):
+ """
+ Determine whether the associated metadata members are equivalent.
+
+ Args:
+
+ * other (metadata):
+ A metadata instance of the same type.
+
+ Kwargs:
+
+ * lenient (boolean):
+ Enable/disable lenient equivalence. The default is to automatically
+ detect whether this lenient operation is enabled.
+
+ Returns:
+ Boolean.
+
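+        For example::
+
+            cube1.metadata.equal(cube2.metadata, lenient=True)
+
+        .. note::
+            Here, ``cube1`` and ``cube2`` are simply illustrative
+            :class:`~iris.cube.Cube` instances.
+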
+ """
+ result = self._api_common(
+ other, self.equal, self.__eq__, "compare", lenient=lenient
+ )
+ return result
+
+ @classmethod
+ def from_metadata(cls, other):
+ result = None
+ if isinstance(other, BaseMetadata):
+ if other.__class__ is cls:
+ result = other
+ else:
+ kwargs = {field: None for field in cls._fields}
+ fields = set(cls._fields) & set(other._fields)
+ for field in fields:
+ kwargs[field] = getattr(other, field)
+ result = cls(**kwargs)
+ return result
+
+ def name(self, default=None, token=False):
+ """
+ Returns a string name representing the identity of the metadata.
+
+        First it tries the standard name, then the long name, then
+        the NetCDF variable name, before falling back to a default value,
+        which itself defaults to the string 'unknown'.
+
+ Kwargs:
+
+ * default:
+ The fall-back string representing the default name. Defaults to
+ the string 'unknown'.
+ * token:
+ If True, ensures that the name returned satisfies the criteria for
+ the characters required by a valid NetCDF name. If it is not
+ possible to return a valid name, then a ValueError exception is
+ raised. Defaults to False.
+
+ Returns:
+ String.
+
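+        For example::
+
+            metadata = BaseMetadata(
+                standard_name=None,
+                long_name=None,
+                var_name="air_temp",
+                units=None,
+                attributes=None,
+            )
+            metadata.name()  # returns 'air_temp'
+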
+ """
+
+ def _check(item):
+ return self.token(item) if token else item
+
+ default = self.DEFAULT_NAME if default is None else default
+
+ result = (
+ _check(self.standard_name)
+ or _check(self.long_name)
+ or _check(self.var_name)
+ or _check(default)
+ )
+
+ if token and result is None:
+ emsg = "Cannot retrieve a valid name token from {!r}"
+ raise ValueError(emsg.format(self))
+
+ return result
+
+ @classmethod
+ def token(cls, name):
+ """
+ Determine whether the provided name is a valid NetCDF name and thus
+ safe to represent a single parsable token.
+
+ Args:
+
+ * name:
+            The string name to verify.
+
+ Returns:
+ The provided name if valid, otherwise None.
+
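+        For example::
+
+            BaseMetadata.token("air_temperature")  # returns 'air_temperature'
+            BaseMetadata.token("air temperature")  # returns None
+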
+ """
+        if name is not None:
+            result = _TOKEN_PARSE.match(name)
+            if result is None:
+                name = None
+
+ return name
+
+
+class AncillaryVariableMetadata(BaseMetadata):
+ """
+    Metadata container for a :class:`~iris.coords.AncillaryVariable`.
+
+ """
+
+ __slots__ = ()
+
+ @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=())
+ @lenient_service
+ def __eq__(self, other):
+ return super().__eq__(other)
+
+ @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=())
+ @lenient_service
+ def combine(self, other, lenient=None):
+ return super().combine(other, lenient=lenient)
+
+ @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=())
+ @lenient_service
+ def difference(self, other, lenient=None):
+ return super().difference(other, lenient=lenient)
+
+ @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=())
+ @lenient_service
+ def equal(self, other, lenient=None):
+ return super().equal(other, lenient=lenient)
+
+
+class CellMeasureMetadata(BaseMetadata):
+ """
+ Metadata container for a :class:`~iris.coords.CellMeasure`.
+
+ """
+
+ _members = "measure"
+
+ __slots__ = ()
+
+ @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=())
+ @lenient_service
+ def __eq__(self, other):
+ return super().__eq__(other)
+
+ def _combine_lenient(self, other):
+ """
+ Perform lenient combination of metadata members for cell measures.
+
+ Args:
+
+ * other (CellMeasureMetadata):
+ The other cell measure metadata participating in the lenient
+ combination.
+
+ Returns:
+ A list of combined metadata member values.
+
+ """
+ # Perform "strict" combination for "measure".
+ value = self.measure if self.measure == other.measure else None
+ # Perform lenient combination of the other parent members.
+ result = super()._combine_lenient(other)
+ result.append(value)
+
+ return result
+
+ def _compare_lenient(self, other):
+ """
+ Perform lenient equality of metadata members for cell measures.
+
+ Args:
+
+ * other (CellMeasureMetadata):
+ The other cell measure metadata participating in the lenient
+ comparison.
+
+ Returns:
+ Boolean.
+
+ """
+ # Perform "strict" comparison for "measure".
+ result = self.measure == other.measure
+ if result:
+ # Perform lenient comparison of the other parent members.
+ result = super()._compare_lenient(other)
+
+ return result
+
+ def _difference_lenient(self, other):
+ """
+ Perform lenient difference of metadata members for cell measures.
+
+ Args:
+
+ * other (CellMeasureMetadata):
+ The other cell measure metadata participating in the lenient
+ difference.
+
+ Returns:
+ A list of difference metadata member values.
+
+ """
+ # Perform "strict" difference for "measure".
+ value = (
+ None
+ if self.measure == other.measure
+ else (self.measure, other.measure)
+ )
+ # Perform lenient difference of the other parent members.
+ result = super()._difference_lenient(other)
+ result.append(value)
+
+ return result
+
+ @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=())
+ @lenient_service
+ def combine(self, other, lenient=None):
+ return super().combine(other, lenient=lenient)
+
+ @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=())
+ @lenient_service
+ def difference(self, other, lenient=None):
+ return super().difference(other, lenient=lenient)
+
+ @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=())
+ @lenient_service
+ def equal(self, other, lenient=None):
+ return super().equal(other, lenient=lenient)
+
+
+class CoordMetadata(BaseMetadata):
+ """
+ Metadata container for a :class:`~iris.coords.Coord`.
+
+ """
+
+ _members = ("coord_system", "climatological")
+
+ __slots__ = ()
+
+ @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=())
+ @lenient_service
+ def __eq__(self, other):
+ # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+ if (
+ self.__class__ is CoordMetadata
+ and hasattr(other, "__class__")
+ and other.__class__ is DimCoordMetadata
+ ):
+ other = self.from_metadata(other)
+ return super().__eq__(other)
+
+ def __lt__(self, other):
+ #
+ # Support Python2 behaviour for a "<" operation involving a
+ # "NoneType" operand.
+ #
+ if not isinstance(other, BaseMetadata):
+ return NotImplemented
+
+ if other.__class__ is DimCoordMetadata:
+ other = self.from_metadata(other)
+
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+
+ def _sort_key(item):
+ keys = []
+ for field in item._fields:
+ if field not in ("attributes", "coord_system"):
+ value = getattr(item, field)
+ keys.extend((value is not None, value))
+ return tuple(keys)
+
+ return _sort_key(self) < _sort_key(other)
+
+ def _combine_lenient(self, other):
+ """
+ Perform lenient combination of metadata members for coordinates.
+
+ Args:
+
+ * other (CoordMetadata):
+ The other coordinate metadata participating in the lenient
+ combination.
+
+ Returns:
+ A list of combined metadata member values.
+
+ """
+ # Perform "strict" combination for "coord_system" and "climatological".
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ return left if left == right else None
+
+        # Note that we use "_members", not "_fields".
+ values = [func(field) for field in CoordMetadata._members]
+ # Perform lenient combination of the other parent members.
+ result = super()._combine_lenient(other)
+ result.extend(values)
+
+ return result
+
+ def _compare_lenient(self, other):
+ """
+ Perform lenient equality of metadata members for coordinates.
+
+ Args:
+
+ * other (CoordMetadata):
+ The other coordinate metadata participating in the lenient
+ comparison.
+
+ Returns:
+ Boolean.
+
+ """
+ # Perform "strict" comparison for "coord_system" and "climatological".
+ result = all(
+ [
+ getattr(self, field) == getattr(other, field)
+ for field in CoordMetadata._members
+ ]
+ )
+ if result:
+ # Perform lenient comparison of the other parent members.
+ result = super()._compare_lenient(other)
+
+ return result
+
+ def _difference_lenient(self, other):
+ """
+ Perform lenient difference of metadata members for coordinates.
+
+ Args:
+
+ * other (CoordMetadata):
+ The other coordinate metadata participating in the lenient
+ difference.
+
+ Returns:
+ A list of difference metadata member values.
+
+ """
+ # Perform "strict" difference for "coord_system" and "climatological".
+ def func(field):
+ left = getattr(self, field)
+ right = getattr(other, field)
+ return None if left == right else (left, right)
+
+        # Note that we use "_members", not "_fields".
+ values = [func(field) for field in CoordMetadata._members]
+ # Perform lenient difference of the other parent members.
+ result = super()._difference_lenient(other)
+ result.extend(values)
+
+ return result
+
+ @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=())
+ @lenient_service
+ def combine(self, other, lenient=None):
+ # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+ if (
+ self.__class__ is CoordMetadata
+ and hasattr(other, "__class__")
+ and other.__class__ is DimCoordMetadata
+ ):
+ other = self.from_metadata(other)
+ return super().combine(other, lenient=lenient)
+
+ @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=())
+ @lenient_service
+ def difference(self, other, lenient=None):
+ # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+ if (
+ self.__class__ is CoordMetadata
+ and hasattr(other, "__class__")
+ and other.__class__ is DimCoordMetadata
+ ):
+ other = self.from_metadata(other)
+ return super().difference(other, lenient=lenient)
+
+ @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=())
+ @lenient_service
+ def equal(self, other, lenient=None):
+ # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+ if (
+ self.__class__ is CoordMetadata
+ and hasattr(other, "__class__")
+ and other.__class__ is DimCoordMetadata
+ ):
+ other = self.from_metadata(other)
+ return super().equal(other, lenient=lenient)
+
+
+class CubeMetadata(BaseMetadata):
+ """
+ Metadata container for a :class:`~iris.cube.Cube`.
+
+ """
+
+ _members = "cell_methods"
+
+ __slots__ = ()
+
+ @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=())
+ @lenient_service
+ def __eq__(self, other):
+ return super().__eq__(other)
+
+ def __lt__(self, other):
+ #
+ # Support Python2 behaviour for a "<" operation involving a
+ # "NoneType" operand.
+ #
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+
+ def _sort_key(item):
+ keys = []
+ for field in item._fields:
+ if field not in ("attributes", "cell_methods"):
+ value = getattr(item, field)
+ keys.extend((value is not None, value))
+ return tuple(keys)
+
+ return _sort_key(self) < _sort_key(other)
+
+ def _combine_lenient(self, other):
+ """
+ Perform lenient combination of metadata members for cubes.
+
+ Args:
+
+ * other (CubeMetadata):
+ The other cube metadata participating in the lenient combination.
+
+ Returns:
+ A list of combined metadata member values.
+
+ """
+ # Perform "strict" combination for "cell_methods".
+ value = (
+ self.cell_methods
+ if self.cell_methods == other.cell_methods
+ else None
+ )
+ # Perform lenient combination of the other parent members.
+ result = super()._combine_lenient(other)
+ result.append(value)
+
+ return result
+
+ def _compare_lenient(self, other):
+ """
+ Perform lenient equality of metadata members for cubes.
+
+ Args:
+
+ * other (CubeMetadata):
+ The other cube metadata participating in the lenient comparison.
+
+ Returns:
+ Boolean.
+
+ """
+ # Perform "strict" comparison for "cell_methods".
+ result = self.cell_methods == other.cell_methods
+ if result:
+ result = super()._compare_lenient(other)
+
+ return result
+
+ def _difference_lenient(self, other):
+ """
+ Perform lenient difference of metadata members for cubes.
+
+ Args:
+
+ * other (CubeMetadata):
+ The other cube metadata participating in the lenient difference.
+
+ Returns:
+ A list of difference metadata member values.
+
+ """
+ # Perform "strict" difference for "cell_methods".
+ value = (
+ None
+ if self.cell_methods == other.cell_methods
+ else (self.cell_methods, other.cell_methods)
+ )
+ # Perform lenient difference of the other parent members.
+ result = super()._difference_lenient(other)
+ result.append(value)
+
+ return result
+
+ @property
+ def _names(self):
+ """
+ A tuple containing the value of each name participating in the identity
+ of a :class:`iris.cube.Cube`. This includes the standard name,
+ long name, NetCDF variable name, and the STASH from the attributes
+ dictionary.
+
+ """
+ standard_name = self.standard_name
+ long_name = self.long_name
+ var_name = self.var_name
+
+ # Defensive enforcement of attributes being a dictionary.
+ if not isinstance(self.attributes, Mapping):
+ try:
+ self.attributes = dict()
+ except AttributeError:
+ emsg = "Invalid '{}.attributes' member, must be a mapping."
+ raise AttributeError(emsg.format(self.__class__.__name__))
+
+ stash_name = self.attributes.get("STASH")
+ if stash_name is not None:
+ stash_name = str(stash_name)
+
+ return standard_name, long_name, var_name, stash_name
+
+ @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=())
+ @lenient_service
+ def combine(self, other, lenient=None):
+ return super().combine(other, lenient=lenient)
+
+ @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=())
+ @lenient_service
+ def difference(self, other, lenient=None):
+ return super().difference(other, lenient=lenient)
+
+ @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=())
+ @lenient_service
+ def equal(self, other, lenient=None):
+ return super().equal(other, lenient=lenient)
+
+ @wraps(BaseMetadata.name)
+ def name(self, default=None, token=False):
+ def _check(item):
+ return self.token(item) if token else item
+
+ default = self.DEFAULT_NAME if default is None else default
+
+ # Defensive enforcement of attributes being a dictionary.
+ if not isinstance(self.attributes, Mapping):
+ try:
+ self.attributes = dict()
+ except AttributeError:
+ emsg = "Invalid '{}.attributes' member, must be a mapping."
+ raise AttributeError(emsg.format(self.__class__.__name__))
+
+ result = (
+ _check(self.standard_name)
+ or _check(self.long_name)
+ or _check(self.var_name)
+ or _check(str(self.attributes.get("STASH", "")))
+ or _check(default)
+ )
+
+ if token and result is None:
+ emsg = "Cannot retrieve a valid name token from {!r}"
+ raise ValueError(emsg.format(self))
+
+ return result
+
+
+class DimCoordMetadata(CoordMetadata):
+ """
+    Metadata container for a :class:`~iris.coords.DimCoord`.
+
+ """
+
+ # The "circular" member is stateful only, and does not participate
+ # in lenient/strict equivalence.
+ _members = ("circular",)
+
+ __slots__ = ()
+
+ @wraps(CoordMetadata.__eq__, assigned=("__doc__",), updated=())
+ @lenient_service
+ def __eq__(self, other):
+ # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+ if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+ other = self.from_metadata(other)
+ return super().__eq__(other)
+
+ def __lt__(self, other):
+ #
+ # Support Python2 behaviour for a "<" operation involving a
+ # "NoneType" operand.
+ #
+ if not isinstance(other, BaseMetadata):
+ return NotImplemented
+
+ if other.__class__ is CoordMetadata:
+ other = self.from_metadata(other)
+
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+
+ def _sort_key(item):
+ keys = []
+ for field in item._fields:
+ if field not in ("attributes", "coord_system"):
+ value = getattr(item, field)
+ keys.extend((value is not None, value))
+ return tuple(keys)
+
+ return _sort_key(self) < _sort_key(other)
+
+ @wraps(CoordMetadata._combine_lenient, assigned=("__doc__",), updated=())
+ def _combine_lenient(self, other):
+ # Perform "strict" combination for "circular".
+ value = self.circular if self.circular == other.circular else None
+ # Perform lenient combination of the other parent members.
+ result = super()._combine_lenient(other)
+ result.append(value)
+
+ return result
+
+ @wraps(CoordMetadata._compare_lenient, assigned=("__doc__",), updated=())
+ def _compare_lenient(self, other):
+ # The "circular" member is not part of lenient equivalence.
+ return super()._compare_lenient(other)
+
+ @wraps(
+ CoordMetadata._difference_lenient, assigned=("__doc__",), updated=()
+ )
+ def _difference_lenient(self, other):
+ # Perform "strict" difference for "circular".
+ value = (
+ None
+ if self.circular == other.circular
+ else (self.circular, other.circular)
+ )
+ # Perform lenient difference of the other parent members.
+ result = super()._difference_lenient(other)
+ result.append(value)
+
+ return result
+
+ @wraps(CoordMetadata.combine, assigned=("__doc__",), updated=())
+ @lenient_service
+ def combine(self, other, lenient=None):
+ # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+ if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+ other = self.from_metadata(other)
+ return super().combine(other, lenient=lenient)
+
+ @wraps(CoordMetadata.difference, assigned=("__doc__",), updated=())
+ @lenient_service
+ def difference(self, other, lenient=None):
+ # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+ if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+ other = self.from_metadata(other)
+ return super().difference(other, lenient=lenient)
+
+ @wraps(CoordMetadata.equal, assigned=("__doc__",), updated=())
+ @lenient_service
+ def equal(self, other, lenient=None):
+ # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+ if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+ other = self.from_metadata(other)
+ return super().equal(other, lenient=lenient)
+
+
+def metadata_manager_factory(cls, **kwargs):
+ """
+ A class instance factory function responsible for manufacturing
+ metadata instances dynamically at runtime.
+
+    The instances returned by the factory are capable of managing
+    their metadata state, which can be proxied by the owning container.
+
+ Args:
+
+ * cls:
+ A subclass of :class:`~iris.common.metadata.BaseMetadata`, defining
+ the metadata to be managed.
+
+ Kwargs:
+
+ * kwargs:
+ Initial values for the manufactured metadata instance. Unspecified
+ fields will default to a value of 'None'.
+
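+    For example::
+
+        manager = metadata_manager_factory(CubeMetadata)
+        manager.standard_name = "air_temperature"
+        manager.values  # the managed CubeMetadata instance
+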
+ """
+
+ def __init__(self, cls, **kwargs):
+ # Restrict to only dealing with appropriate metadata classes.
+ if not issubclass(cls, BaseMetadata):
+ emsg = "Require a subclass of {!r}, got {!r}."
+ raise TypeError(emsg.format(BaseMetadata.__name__, cls))
+
+ #: The metadata class to be manufactured by this factory.
+ self.cls = cls
+
+ # Initialise the metadata class fields in the instance.
+ for field in self.fields:
+ setattr(self, field, None)
+
+ # Populate with provided kwargs, which have already been verified
+ # by the factory.
+ for field, value in kwargs.items():
+ setattr(self, field, value)
+
+ def __eq__(self, other):
+ if not hasattr(other, "cls"):
+ return NotImplemented
+ match = self.cls is other.cls
+ if match:
+ match = self.values == other.values
+
+ return match
+
+ def __getstate__(self):
+ """Return the instance state to be pickled."""
+ return {field: getattr(self, field) for field in self.fields}
+
+ def __ne__(self, other):
+ match = self.__eq__(other)
+ if match is not NotImplemented:
+ match = not match
+
+ return match
+
+ def __reduce__(self):
+ """
+        Classes created dynamically at runtime cannot be pickled, as they
+        are not defined at the top level of a module. As a result, we must
+        use the __reduce__ interface to allow 'pickle' to recreate this
+        class instance, and dump and load instance state successfully.
+
+ """
+ return metadata_manager_factory, (self.cls,), self.__getstate__()
+
+ def __repr__(self):
+ args = ", ".join(
+ [
+ "{}={!r}".format(field, getattr(self, field))
+ for field in self.fields
+ ]
+ )
+ return "{}({})".format(self.__class__.__name__, args)
+
+ def __setstate__(self, state):
+ """Set the instance state when unpickling."""
+ for field, value in state.items():
+ setattr(self, field, value)
+
+ @property
+ def fields(self):
+        """Return the names of the metadata members."""
+        # Proxy for the namedtuple "_fields" attribute.
+ return self.cls._fields
+
+ @property
+ def values(self):
+ fields = {field: getattr(self, field) for field in self.fields}
+ return self.cls(**fields)
+
+ # Restrict factory to appropriate metadata classes only.
+ if not issubclass(cls, BaseMetadata):
+ emsg = "Require a subclass of {!r}, got {!r}."
+ raise TypeError(emsg.format(BaseMetadata.__name__, cls))
+
+ # Check whether kwargs have valid fields for the specified metadata.
+ if kwargs:
+ extra = [field for field in kwargs.keys() if field not in cls._fields]
+ if extra:
+ bad = ", ".join(map(lambda field: "{!r}".format(field), extra))
+ emsg = "Invalid {!r} field parameters, got {}."
+ raise ValueError(emsg.format(cls.__name__, bad))
+
+ # Define the name, (inheritance) bases and namespace of the dynamic class.
+ name = "MetadataManager"
+ bases = ()
+ namespace = {
+ "DEFAULT_NAME": cls.DEFAULT_NAME,
+ "__init__": __init__,
+ "__eq__": __eq__,
+ "__getstate__": __getstate__,
+ "__ne__": __ne__,
+ "__reduce__": __reduce__,
+ "__repr__": __repr__,
+ "__setstate__": __setstate__,
+ "fields": fields,
+ "name": cls.name,
+ "token": cls.token,
+ "values": values,
+ }
+
+ # Account for additional "CubeMetadata" specialised class behaviour.
+ if cls is CubeMetadata:
+ namespace["_names"] = cls._names
+
+ # Dynamically create the class.
+ Metadata = type(name, bases, namespace)
+ # Now manufacture an instance of that class.
+ metadata = Metadata(cls, **kwargs)
+
+ return metadata
+
+
+#: Convenience collection of lenient metadata combine services.
+SERVICES_COMBINE = (
+ AncillaryVariableMetadata.combine,
+ BaseMetadata.combine,
+ CellMeasureMetadata.combine,
+ CoordMetadata.combine,
+ CubeMetadata.combine,
+ DimCoordMetadata.combine,
+)
+
+
+#: Convenience collection of lenient metadata difference services.
+SERVICES_DIFFERENCE = (
+ AncillaryVariableMetadata.difference,
+ BaseMetadata.difference,
+ CellMeasureMetadata.difference,
+ CoordMetadata.difference,
+ CubeMetadata.difference,
+ DimCoordMetadata.difference,
+)
+
+
+#: Convenience collection of lenient metadata equality services.
+SERVICES_EQUAL = (
+ AncillaryVariableMetadata.__eq__,
+ AncillaryVariableMetadata.equal,
+ BaseMetadata.__eq__,
+ BaseMetadata.equal,
+ CellMeasureMetadata.__eq__,
+ CellMeasureMetadata.equal,
+ CoordMetadata.__eq__,
+ CoordMetadata.equal,
+ CubeMetadata.__eq__,
+ CubeMetadata.equal,
+ DimCoordMetadata.__eq__,
+ DimCoordMetadata.equal,
+)
+
+
+#: Convenience collection of lenient metadata services.
+SERVICES = SERVICES_COMBINE + SERVICES_DIFFERENCE + SERVICES_EQUAL
diff --git a/lib/iris/_cube_coord_common.py b/lib/iris/common/mixin.py
similarity index 51%
rename from lib/iris/_cube_coord_common.py
rename to lib/iris/common/mixin.py
index 541780ca15..50ef561036 100644
--- a/lib/iris/_cube_coord_common.py
+++ b/lib/iris/common/mixin.py
@@ -5,43 +5,20 @@
# licensing details.
-from collections import namedtuple
+from collections.abc import Mapping
+from functools import wraps
import re
import cf_units
+from iris.common import BaseMetadata
import iris.std_names
-# https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name
-_TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""")
+__all__ = ["CFVariableMixin"]
-class Names(
- namedtuple("Names", ["standard_name", "long_name", "var_name", "STASH"])
-):
- """
- Immutable container for name metadata.
-
- Args:
-
- * standard_name:
- A string representing the CF Conventions and Metadata standard name, or
- None.
- * long_name:
- A string representing the CF Conventions and Metadata long name, or
- None
- * var_name:
- A string representing the associated NetCDF variable name, or None.
- * STASH:
- A string representing the `~iris.fileformats.pp.STASH` code, or None.
-
- """
-
- __slots__ = ()
-
-
-def get_valid_standard_name(name):
+def _get_valid_standard_name(name):
# Standard names are optionally followed by a standard name
# modifier, separated by one or more blank spaces
@@ -100,7 +77,7 @@ def __init__(self, *args, **kwargs):
# Check validity of keys
for key in self.keys():
if key in self._forbidden_keys:
- raise ValueError("%r is not a permitted attribute" % key)
+ raise ValueError(f"{key!r} is not a permitted attribute")
def __eq__(self, other):
# Extend equality to allow for NumPy arrays.
@@ -121,7 +98,7 @@ def __ne__(self, other):
def __setitem__(self, key, value):
if key in self._forbidden_keys:
- raise ValueError("%r is not a permitted attribute" % key)
+ raise ValueError(f"{key!r} is not a permitted attribute")
dict.__setitem__(self, key, value)
def update(self, other, **kwargs):
@@ -137,92 +114,15 @@ def update(self, other, **kwargs):
# Check validity of keys
for key in keys:
if key in self._forbidden_keys:
- raise ValueError("%r is not a permitted attribute" % key)
+ raise ValueError(f"{key!r} is not a permitted attribute")
dict.update(self, other, **kwargs)
class CFVariableMixin:
-
- _DEFAULT_NAME = "unknown" # the name default string
-
- @staticmethod
- def token(name):
- """
- Determine whether the provided name is a valid NetCDF name and thus
- safe to represent a single parsable token.
-
- Args:
-
- * name:
- The string name to verify
-
- Returns:
- The provided name if valid, otherwise None.
-
- """
- if name is not None:
- result = _TOKEN_PARSE.match(name)
- name = result if result is None else name
- return name
-
- def name(self, default=None, token=False):
- """
- Returns a human-readable name.
-
- First it tries :attr:`standard_name`, then 'long_name', then
- 'var_name', then the STASH attribute before falling back to
- the value of `default` (which itself defaults to 'unknown').
-
- Kwargs:
-
- * default:
- The value of the default name.
- * token:
- If true, ensure that the name returned satisfies the criteria for
- the characters required by a valid NetCDF name. If it is not
- possible to return a valid name, then a ValueError exception is
- raised.
-
- Returns:
- String.
-
- """
-
- def _check(item):
- return self.token(item) if token else item
-
- default = self._DEFAULT_NAME if default is None else default
-
- result = (
- _check(self.standard_name)
- or _check(self.long_name)
- or _check(self.var_name)
- or _check(str(self.attributes.get("STASH", "")))
- or _check(default)
- )
-
- if token and result is None:
- emsg = "Cannot retrieve a valid name token from {!r}"
- raise ValueError(emsg.format(self))
-
- return result
-
- @property
- def names(self):
- """
- A tuple containing all of the metadata names. This includes the
- standard name, long name, NetCDF variable name, and attributes
- STASH name.
-
- """
- standard_name = self.standard_name
- long_name = self.long_name
- var_name = self.var_name
- stash_name = self.attributes.get("STASH")
- if stash_name is not None:
- stash_name = str(stash_name)
- return Names(standard_name, long_name, var_name, stash_name)
+ @wraps(BaseMetadata.name)
+    def name(self, default=None, token=False):
+ return self._metadata_manager.name(default=default, token=token)
def rename(self, name):
"""
@@ -245,40 +145,99 @@ def rename(self, name):
@property
def standard_name(self):
- """The standard name for the Cube's data."""
- return self._standard_name
+ """The CF Metadata standard name for the object."""
+ return self._metadata_manager.standard_name
@standard_name.setter
def standard_name(self, name):
- self._standard_name = get_valid_standard_name(name)
+ self._metadata_manager.standard_name = _get_valid_standard_name(name)
@property
- def units(self):
- """The :mod:`~cf_units.Unit` instance of the object."""
- return self._units
+ def long_name(self):
+ """The CF Metadata long name for the object."""
+ return self._metadata_manager.long_name
- @units.setter
- def units(self, unit):
- self._units = cf_units.as_unit(unit)
+ @long_name.setter
+ def long_name(self, name):
+ self._metadata_manager.long_name = name
@property
def var_name(self):
- """The netCDF variable name for the object."""
- return self._var_name
+ """The NetCDF variable name for the object."""
+ return self._metadata_manager.var_name
@var_name.setter
def var_name(self, name):
if name is not None:
- result = self.token(name)
+ result = self._metadata_manager.token(name)
if result is None or not name:
emsg = "{!r} is not a valid NetCDF variable name."
raise ValueError(emsg.format(name))
- self._var_name = name
+ self._metadata_manager.var_name = name
+
+ @property
+ def units(self):
+ """The S.I. unit of the object."""
+ return self._metadata_manager.units
+
+ @units.setter
+ def units(self, unit):
+ self._metadata_manager.units = cf_units.as_unit(unit)
@property
def attributes(self):
- return self._attributes
+ return self._metadata_manager.attributes
@attributes.setter
def attributes(self, attributes):
- self._attributes = LimitedAttributeDict(attributes or {})
+ self._metadata_manager.attributes = LimitedAttributeDict(
+ attributes or {}
+ )
+
+ @property
+ def metadata(self):
+ return self._metadata_manager.values
+
+ @metadata.setter
+ def metadata(self, metadata):
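+        """
+        Set the metadata state from another metadata instance, mapping,
+        namedtuple, or equivalent iterable of field values, e.g.::
+
+            cube.metadata = dict(standard_name="air_temperature")
+
+        """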
+ cls = self._metadata_manager.cls
+ fields = self._metadata_manager.fields
+ arg = metadata
+
+ try:
+ # Try dict-like initialisation...
+ metadata = cls(**metadata)
+ except TypeError:
+ try:
+ # Try iterator/namedtuple-like initialisation...
+ metadata = cls(*metadata)
+ except TypeError:
+ if hasattr(metadata, "_asdict"):
+ metadata = metadata._asdict()
+
+ if isinstance(metadata, Mapping):
+ fields = [field for field in fields if field in metadata]
+ else:
+ # Generic iterable/container with no associated keys.
+ missing = [
+ field
+ for field in fields
+ if not hasattr(metadata, field)
+ ]
+
+ if missing:
+ missing = ", ".join(
+ map(lambda i: "{!r}".format(i), missing)
+ )
+ emsg = "Invalid {!r} metadata, require {} to be specified."
+ raise TypeError(emsg.format(type(arg), missing))
+
+ for field in fields:
+ if hasattr(metadata, field):
+ value = getattr(metadata, field)
+ else:
+ value = metadata[field]
+
+            # Always set state through the individual mixin/container
+            # setter functions.
+ setattr(self, field, value)
diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py
new file mode 100644
index 0000000000..7098eaa65e
--- /dev/null
+++ b/lib/iris/common/resolve.py
@@ -0,0 +1,1542 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+
+from collections import namedtuple
+from collections.abc import Iterable
+import logging
+
+from dask.array.core import broadcast_shapes
+import numpy as np
+
+from iris.common import LENIENT
+
+
+__all__ = ["Resolve"]
+
+
+# Configure the logger.
+logger = logging.getLogger(__name__)
+
+
+_AuxCoverage = namedtuple(
+ "AuxCoverage",
+ [
+ "cube",
+ "common_items_aux",
+ "common_items_scalar",
+ "local_items_aux",
+ "local_items_scalar",
+ "dims_common",
+ "dims_local",
+ "dims_free",
+ ],
+)
+
+_CategoryItems = namedtuple(
+ "CategoryItems", ["items_dim", "items_aux", "items_scalar"],
+)
+
+_DimCoverage = namedtuple(
+ "DimCoverage",
+ ["cube", "metadata", "coords", "dims_common", "dims_local", "dims_free"],
+)
+
+_Item = namedtuple("Item", ["metadata", "coord", "dims"])
+
+_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"])
+
+_PreparedItem = namedtuple(
+ "PreparedItem", ["metadata", "points", "bounds", "dims", "container"],
+)
+
+_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"])
+
+
+class Resolve:
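+    """
+    Infer the common metadata of two cubes, mapping the dimensions of
+    the source cube onto those of the target cube, such that both may
+    then be broadcast together into a resolved, compatible form.
+
+    A minimal usage sketch::
+
+        resolver = Resolve(cube1, cube2)
+
+    .. note::
+        Here, ``cube1`` and ``cube2`` are simply illustrative
+        :class:`~iris.cube.Cube` instances.
+
+    """
+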
+ def __init__(self, lhs=None, rhs=None):
+ if lhs is not None or rhs is not None:
+ self(lhs, rhs)
+
+ def __call__(self, lhs, rhs):
+ self._init(lhs, rhs)
+
+ self._metadata_resolve()
+ self._metadata_coverage()
+
+ if self._debug:
+ self._debug_items(self.lhs_cube_category_local, title="LHS local")
+ self._debug_items(self.rhs_cube_category_local, title="RHS local")
+ self._debug_items(self.category_common, title="common")
+ logger.debug(f"map_rhs_to_lhs={self.map_rhs_to_lhs}")
+
+ self._metadata_mapping()
+ self._metadata_prepare()
+
+ def _as_compatible_cubes(self):
+ from iris.cube import Cube
+
+ src_cube = self._src_cube
+ tgt_cube = self._tgt_cube
+
+ # Use the mapping to calculate the new src cube shape.
+ new_src_shape = [1] * tgt_cube.ndim
+ for src_dim, tgt_dim in self.mapping.items():
+ new_src_shape[tgt_dim] = src_cube.shape[src_dim]
+ new_src_shape = tuple(new_src_shape)
+ dmsg = (
+ f"new src {self._src_cube_position} cube shape {new_src_shape}, "
+ f"actual shape {src_cube.shape}"
+ )
+ logger.debug(dmsg)
+
+ try:
+ # Determine whether the tgt cube shape and proposed new src
+ # cube shape will successfully broadcast together.
+ self._broadcast_shape = broadcast_shapes(
+ tgt_cube.shape, new_src_shape
+ )
+ except ValueError:
+ emsg = (
+ "Cannot resolve cubes, as a suitable transpose of the "
+ f"{self._src_cube_position} cube {src_cube.name()!r} "
+ f"will not broadcast with the {self._tgt_cube_position} cube "
+ f"{tgt_cube.name()!r}."
+ )
+ raise ValueError(emsg)
+
+ new_src_data = src_cube.core_data().copy()
+
+ # Use the mapping to determine the transpose sequence of
+ # src dimensions in increasing tgt dimension order.
+ order = [
+ src_dim
+ for src_dim, tgt_dim in sorted(
+ self.mapping.items(), key=lambda pair: pair[1]
+ )
+ ]
+
+ # Determine whether a transpose of the src cube is necessary.
+ if order != sorted(order):
+ new_src_data = new_src_data.transpose(order)
+ logger.debug(
+ f"transpose src {self._src_cube_position} cube with order {order}"
+ )
+
+ # Determine whether a reshape is necessary.
+ if new_src_shape != new_src_data.shape:
+ new_src_data = new_src_data.reshape(new_src_shape)
+ logger.debug(
+ f"reshape src {self._src_cube_position} cube to new shape {new_src_shape}"
+ )
+
+ # Create the new src cube.
+ new_src_cube = Cube(new_src_data)
+ new_src_cube.metadata = src_cube.metadata
+
+ def add_coord(coord, dim_coord=False):
+ src_dims = src_cube.coord_dims(coord)
+ tgt_dims = [self.mapping[src_dim] for src_dim in src_dims]
+ if dim_coord:
+ new_src_cube.add_dim_coord(coord, tgt_dims)
+ else:
+ new_src_cube.add_aux_coord(coord, tgt_dims)
+
+ # Add the dim coordinates to the new src cube.
+ for coord in src_cube.dim_coords:
+ add_coord(coord, dim_coord=True)
+
+ # Add the aux and scalar coordinates to the new src cube.
+ for coord in src_cube.aux_coords:
+ add_coord(coord)
+
+ # Add the aux factories to the new src cube.
+ for factory in src_cube.aux_factories:
+ new_src_cube.add_aux_factory(factory)
+
+ # Set the resolved cubes.
+ self._src_cube_resolved = new_src_cube
+ self._tgt_cube_resolved = tgt_cube
+
+ @staticmethod
+ def _aux_coverage(
+ cube,
+ cube_items_aux,
+ cube_items_scalar,
+ common_aux_metadata,
+ common_scalar_metadata,
+ ):
+ common_items_aux = []
+ common_items_scalar = []
+ local_items_aux = []
+ local_items_scalar = []
+ dims_common = []
+ dims_local = []
+ dims_free = set(range(cube.ndim))
+
+ for item in cube_items_aux:
+ [dims_free.discard(dim) for dim in item.dims]
+
+ if item.metadata in common_aux_metadata:
+ common_items_aux.append(item)
+ dims_common.extend(item.dims)
+ else:
+ local_items_aux.append(item)
+ dims_local.extend(item.dims)
+
+ for item in cube_items_scalar:
+ if item.metadata in common_scalar_metadata:
+ common_items_scalar.append(item)
+ else:
+ local_items_scalar.append(item)
+
+ return _AuxCoverage(
+ cube=cube,
+ common_items_aux=common_items_aux,
+ common_items_scalar=common_items_scalar,
+ local_items_aux=local_items_aux,
+ local_items_scalar=local_items_scalar,
+ dims_common=sorted(set(dims_common)),
+ dims_local=sorted(set(dims_local)),
+ dims_free=sorted(dims_free),
+ )
+
+ def _aux_mapping(self, src_coverage, tgt_coverage):
+ for tgt_item in tgt_coverage.common_items_aux:
+ # Search for a src aux metadata match.
+ tgt_metadata = tgt_item.metadata
+ src_items = tuple(
+ filter(
+ lambda src_item: src_item.metadata == tgt_metadata,
+ src_coverage.common_items_aux,
+ )
+ )
+ if src_items:
+ # Multiple matching src metadata must cover the same src
+ # dimensions.
+ src_dims = src_items[0].dims
+ if all(map(lambda item: item.dims == src_dims, src_items)):
+ # Ensure src and tgt have equal rank.
+ tgt_dims = tgt_item.dims
+ if len(src_dims) == len(tgt_dims):
+ for src_dim, tgt_dim in zip(src_dims, tgt_dims):
+ self.mapping[src_dim] = tgt_dim
+ logger.debug(f"{src_dim}->{tgt_dim}")
+ else:
+ # This situation can only occur due to a systemic internal
+ # failure to correctly identify common aux coordinate metadata
+ # coverage between the cubes.
+ emsg = (
+ "Failed to map common aux coordinate metadata from "
+ "source cube {!r} to target cube {!r}, using {!r} on "
+ "target cube dimension{} {}."
+ )
+ raise ValueError(
+ emsg.format(
+ src_coverage.cube.name(),
+ tgt_coverage.cube.name(),
+ tgt_metadata,
+ "s" if len(tgt_item.dims) > 1 else "",
+ tgt_item.dims,
+ )
+ )
+
+ @staticmethod
+ def _categorise_items(cube):
+ category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
+
+ # Categorise the dim coordinates of the cube.
+ for coord in cube.dim_coords:
+ item = _Item(
+ metadata=coord.metadata,
+ coord=coord,
+ dims=cube.coord_dims(coord),
+ )
+ category.items_dim.append(item)
+
+ # Categorise the aux and scalar coordinates of the cube.
+ for coord in cube.aux_coords:
+ dims = cube.coord_dims(coord)
+ item = _Item(metadata=coord.metadata, coord=coord, dims=dims)
+ if dims:
+ category.items_aux.append(item)
+ else:
+ category.items_scalar.append(item)
+
+ return category
+
+ @staticmethod
+ def _create_prepared_item(coord, dims, src=None, tgt=None):
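+ # The "combined" metadata is the result of combining src with tgt
+ # when both are given, otherwise whichever single side was provided.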
+ if src is not None and tgt is not None:
+ combined = src.combine(tgt)
+ else:
+ combined = src or tgt
+ if not isinstance(dims, Iterable):
+ dims = (dims,)
+ prepared_metadata = _PreparedMetadata(
+ combined=combined, src=src, tgt=tgt
+ )
+ bounds = coord.bounds
+ result = _PreparedItem(
+ metadata=prepared_metadata,
+ points=coord.points.copy(),
+ bounds=bounds if bounds is None else bounds.copy(),
+ dims=dims,
+ container=type(coord),
+ )
+ return result
+
+ @property
+ def _debug(self):
+ result = False
+ level = logger.getEffectiveLevel()
+ if level != logging.NOTSET:
+ result = logging.DEBUG >= level
+ return result
+
+ @staticmethod
+ def _debug_items(items, title=None):
+ def _show(items, heading):
+ logger.debug(f"{title}{heading}:")
+ for item in items:
+ dmsg = f"metadata={item.metadata}, dims={item.dims}, bounds={item.coord.has_bounds()}"
+ logger.debug(dmsg)
+
+ title = f"{title} " if title else ""
+ _show(items.items_dim, "dim")
+ _show(items.items_aux, "aux")
+ _show(items.items_scalar, "scalar")
+
+ @staticmethod
+ def _dim_coverage(cube, cube_items_dim, common_dim_metadata):
+ ndim = cube.ndim
+ metadata = [None] * ndim
+ coords = [None] * ndim
+ dims_common = []
+ dims_local = []
+ dims_free = set(range(ndim))
+
+ for item in cube_items_dim:
+ (dim,) = item.dims
+ dims_free.discard(dim)
+ metadata[dim] = item.metadata
+ coords[dim] = item.coord
+ if item.metadata in common_dim_metadata:
+ dims_common.append(dim)
+ else:
+ dims_local.append(dim)
+
+ return _DimCoverage(
+ cube=cube,
+ metadata=metadata,
+ coords=coords,
+ dims_common=sorted(dims_common),
+ dims_local=sorted(dims_local),
+ dims_free=sorted(dims_free),
+ )
+
+ def _dim_mapping(self, src_coverage, tgt_coverage):
+ for tgt_dim in tgt_coverage.dims_common:
+ # Search for a src dim metadata match.
+ tgt_metadata = tgt_coverage.metadata[tgt_dim]
+ try:
+ src_dim = src_coverage.metadata.index(tgt_metadata)
+ self.mapping[src_dim] = tgt_dim
+ logger.debug(f"{src_dim}->{tgt_dim}")
+ except ValueError:
+ # This exception can only occur due to a systemic internal
+ # failure to correctly identify common dim coordinate metadata
+ # coverage between the cubes.
+ emsg = (
+ "Failed to map common dim coordinate metadata from "
+ "source cube {!r} to target cube {!r}, using {!r} on "
+ "target cube dimension {}."
+ )
+ raise ValueError(
+ emsg.format(
+ src_coverage.cube.name(),
+ tgt_coverage.cube.name(),
+ tgt_metadata,
+ tuple([tgt_dim]),
+ )
+ )
+
+ def _free_mapping(
+ self,
+ src_dim_coverage,
+ tgt_dim_coverage,
+ src_aux_coverage,
+ tgt_aux_coverage,
+ ):
+ src_cube = src_dim_coverage.cube
+ tgt_cube = tgt_dim_coverage.cube
+ src_ndim = src_cube.ndim
+ tgt_ndim = tgt_cube.ndim
+
+ # mapping src to tgt, involving free dimensions on either the src/tgt.
+ free_mapping = {}
+
+ # Determine the src/tgt dimensions that are not mapped,
+ # and not covered by any metadata.
+ src_free = set(src_dim_coverage.dims_free) & set(
+ src_aux_coverage.dims_free
+ )
+ tgt_free = set(tgt_dim_coverage.dims_free) & set(
+ tgt_aux_coverage.dims_free
+ )
+
+ if src_free or tgt_free:
+ # Determine the src/tgt dimensions that are not mapped.
+ src_unmapped = set(range(src_ndim)) - set(self.mapping)
+ tgt_unmapped = set(range(tgt_ndim)) - set(self.mapping.values())
+
+ # Determine the src/tgt dimensions that are not mapped,
+ # but are covered by a src/tgt local coordinate.
+ src_unmapped_local = src_unmapped - src_free
+ tgt_unmapped_local = tgt_unmapped - tgt_free
+
+ src_shape = src_cube.shape
+ tgt_shape = tgt_cube.shape
+ src_max, tgt_max = max(src_shape), max(tgt_shape)
+
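+ # Match a src extent against candidate tgt dimensions, preferring an
+ # unmapped local dimension over a free one. An extent of 1 always
+ # broadcasts, so it may take the first available candidate regardless
+ # of that candidate's extent.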
+ def assign_mapping(extent, unmapped_local_items, free_items=None):
+ result = None
+ if free_items is None:
+ free_items = []
+ if extent == 1:
+ if unmapped_local_items:
+ result, _ = unmapped_local_items.pop(0)
+ elif free_items:
+ result, _ = free_items.pop(0)
+ else:
+
+ def _filter(items):
+ return list(
+ filter(lambda item: item[1] == extent, items)
+ )
+
+ def _pop(item, items):
+ result, _ = item
+ index = items.index(item)
+ items.pop(index)
+ return result
+
+ items = _filter(unmapped_local_items)
+ if items:
+ result = _pop(items[0], unmapped_local_items)
+ else:
+ items = _filter(free_items)
+ if items:
+ result = _pop(items[0], free_items)
+ return result
+
+ if src_free:
+ # Attempt to map src free dimensions to tgt unmapped local or free dimensions.
+ tgt_unmapped_local_items = [
+ (dim, tgt_shape[dim]) for dim in tgt_unmapped_local
+ ]
+ tgt_free_items = [(dim, tgt_shape[dim]) for dim in tgt_free]
+
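+ # Visit the src free dimensions largest extent first (ties broken
+ # by dimension index), so that larger extents claim a matching tgt
+ # dimension before extent-1 dimensions do.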
+ for src_dim in sorted(
+ src_free, key=lambda dim: (src_max - src_shape[dim], dim)
+ ):
+ tgt_dim = assign_mapping(
+ src_shape[src_dim],
+ tgt_unmapped_local_items,
+ tgt_free_items,
+ )
+ if tgt_dim is None:
+ # Failed to map the src free dimension
+ # to a suitable tgt local/free dimension.
+ dmsg = (
+ f"failed to map src free dimension ({src_dim},) from "
+ f"{self._src_cube_position} cube {src_cube.name()!r} to "
+ f"{self._tgt_cube_position} cube {tgt_cube.name()!r}."
+ )
+ logger.debug(dmsg)
+ break
+ free_mapping[src_dim] = tgt_dim
+ else:
+ # Attempt to map tgt free dimensions to src unmapped local dimensions.
+ src_unmapped_local_items = [
+ (dim, src_shape[dim]) for dim in src_unmapped_local
+ ]
+
+ for tgt_dim in sorted(
+ tgt_free, key=lambda dim: (tgt_max - tgt_shape[dim], dim)
+ ):
+ src_dim = assign_mapping(
+ tgt_shape[tgt_dim], src_unmapped_local_items
+ )
+ if src_dim is not None:
+ free_mapping[src_dim] = tgt_dim
+ if not src_unmapped_local_items:
+ # There are no more src unmapped local dimensions.
+ break
+
+ # Determine whether there are still unmapped src dimensions.
+ src_unmapped = (
+ set(range(src_cube.ndim)) - set(self.mapping) - set(free_mapping)
+ )
+
+ if src_unmapped:
+ plural = "s" if len(src_unmapped) > 1 else ""
+ emsg = (
+ "Insufficient matching coordinate metadata to resolve cubes, "
+ f"cannot map dimension{plural} {tuple(sorted(src_unmapped))} "
+ f"of the {self._src_cube_position} cube {src_cube.name()!r} "
+ f"to the {self._tgt_cube_position} cube {tgt_cube.name()!r}."
+ )
+ raise ValueError(emsg)
+
+ # Update the mapping.
+ self.mapping.update(free_mapping)
+ logger.debug(f"mapping free dimensions gives, mapping={self.mapping}")
+
+ def _init(self, lhs, rhs):
+ from iris.cube import Cube
+
+ emsg = (
+ "{cls} requires {arg!r} argument to be a 'Cube', got {actual!r}."
+ )
+ clsname = self.__class__.__name__
+
+ if not isinstance(lhs, Cube):
+ raise TypeError(
+ emsg.format(cls=clsname, arg="LHS", actual=type(lhs))
+ )
+
+ if not isinstance(rhs, Cube):
+ raise TypeError(
+ emsg.format(cls=clsname, arg="RHS", actual=type(rhs))
+ )
+
+ # The LHS cube to be resolved into the resultant cube.
+ self.lhs_cube = lhs
+ # The RHS cube to be resolved into the resultant cube.
+ self.rhs_cube = rhs
+
+ # The transposed/reshaped (if required) LHS cube, which
+ # can be broadcast with RHS cube.
+ self.lhs_cube_resolved = None
+ # The transposed/reshaped (if required) RHS cube, which
+ # can be broadcast with LHS cube.
+ self.rhs_cube_resolved = None
+
+ # Categorised dim, aux and scalar coordinate items for LHS cube.
+ self.lhs_cube_category = None
+ # Categorised dim, aux and scalar coordinate items for RHS cube.
+ self.rhs_cube_category = None
+
+ # Categorised dim, aux and scalar coordinate items local to LHS cube only.
+ self.lhs_cube_category_local = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # Categorised dim, aux and scalar coordinate items local to RHS cube only.
+ self.rhs_cube_category_local = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ # Categorised dim, aux and scalar coordinate items common to both
+ # LHS cube and RHS cube.
+ self.category_common = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+
+ # Analysis of dim coordinates spanning LHS cube.
+ self.lhs_cube_dim_coverage = None
+ # Analysis of aux and scalar coordinates spanning LHS cube.
+ self.lhs_cube_aux_coverage = None
+ # Analysis of dim coordinates spanning RHS cube.
+ self.rhs_cube_dim_coverage = None
+ # Analysis of aux and scalar coordinates spanning RHS cube.
+ self.rhs_cube_aux_coverage = None
+
+ # Map common metadata from RHS cube to LHS cube if LHS-rank >= RHS-rank,
+ # otherwise map common metadata from LHS cube to RHS cube.
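+ # e.g. a hypothetical 3-d LHS cube and 2-d RHS cube result in
+ # map_rhs_to_lhs=True, i.e. the RHS dimensions are mapped onto
+ # the LHS dimensions.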
+ if self.lhs_cube.ndim >= self.rhs_cube.ndim:
+ self.map_rhs_to_lhs = True
+ else:
+ self.map_rhs_to_lhs = False
+
+ # Mapping of the dimensions between common metadata for the cubes,
+ # where the direction of the mapping is governed by map_rhs_to_lhs.
+ self.mapping = None
+
+ # Cache containing a list of dim, aux and scalar coordinates prepared
+ # and ready for creating and attaching to the resultant cube.
+ self.prepared_category = None
+
+ # Cache containing a list of aux factories prepared and ready for
+ # creating and attaching to the resultant cube.
+ self.prepared_factories = None
+
+ # The shape of the resultant resolved cube.
+ self._broadcast_shape = None
+
+ def _metadata_coverage(self):
+ # Determine the common dim coordinate metadata coverage.
+ common_dim_metadata = [
+ item.metadata for item in self.category_common.items_dim
+ ]
+
+ self.lhs_cube_dim_coverage = self._dim_coverage(
+ self.lhs_cube,
+ self.lhs_cube_category.items_dim,
+ common_dim_metadata,
+ )
+ self.rhs_cube_dim_coverage = self._dim_coverage(
+ self.rhs_cube,
+ self.rhs_cube_category.items_dim,
+ common_dim_metadata,
+ )
+
+ # Determine the common aux and scalar coordinate metadata coverage.
+ common_aux_metadata = [
+ item.metadata for item in self.category_common.items_aux
+ ]
+ common_scalar_metadata = [
+ item.metadata for item in self.category_common.items_scalar
+ ]
+
+ self.lhs_cube_aux_coverage = self._aux_coverage(
+ self.lhs_cube,
+ self.lhs_cube_category.items_aux,
+ self.lhs_cube_category.items_scalar,
+ common_aux_metadata,
+ common_scalar_metadata,
+ )
+ self.rhs_cube_aux_coverage = self._aux_coverage(
+ self.rhs_cube,
+ self.rhs_cube_category.items_aux,
+ self.rhs_cube_category.items_scalar,
+ common_aux_metadata,
+ common_scalar_metadata,
+ )
+
+ def _metadata_mapping(self):
+ # Initialise the state.
+ self.mapping = {}
+
+ # Map RHS cube to LHS cube, or smaller to larger cube rank.
+ if self.map_rhs_to_lhs:
+ src_cube = self.rhs_cube
+ src_dim_coverage = self.rhs_cube_dim_coverage
+ src_aux_coverage = self.rhs_cube_aux_coverage
+ tgt_cube = self.lhs_cube
+ tgt_dim_coverage = self.lhs_cube_dim_coverage
+ tgt_aux_coverage = self.lhs_cube_aux_coverage
+ else:
+ src_cube = self.lhs_cube
+ src_dim_coverage = self.lhs_cube_dim_coverage
+ src_aux_coverage = self.lhs_cube_aux_coverage
+ tgt_cube = self.rhs_cube
+ tgt_dim_coverage = self.rhs_cube_dim_coverage
+ tgt_aux_coverage = self.rhs_cube_aux_coverage
+
+ # Use the dim coordinates to fully map the
+ # src cube dimensions to the tgt cube dimensions.
+ self._dim_mapping(src_dim_coverage, tgt_dim_coverage)
+ logger.debug(
+ f"mapping common dim coordinates gives, mapping={self.mapping}"
+ )
+
+ # If necessary, use the aux coordinates to fully map the
+ # src cube dimensions to the tgt cube dimensions.
+ if not self.mapped:
+ self._aux_mapping(src_aux_coverage, tgt_aux_coverage)
+ logger.debug(
+ f"mapping common aux coordinates, mapping={self.mapping}"
+ )
+
+ if not self.mapped:
+ # Attempt to complete the mapping using src/tgt free dimensions.
+ # Note that this may not be possible, in which case an exception is raised.
+ self._free_mapping(
+ src_dim_coverage,
+ tgt_dim_coverage,
+ src_aux_coverage,
+ tgt_aux_coverage,
+ )
+
+ # Attempt to transpose/reshape the cubes into compatible broadcast shapes.
+ # Note that this may not be possible, in which case an exception is raised.
+ self._as_compatible_cubes()
+
+ # Given the resultant broadcast shape, determine whether the
+ # mapping needs to be reversed.
+ broadcast_flip = (
+ src_cube.ndim == tgt_cube.ndim
+ and self._tgt_cube_resolved.shape != self.shape
+ and self._src_cube_resolved.shape == self.shape
+ )
+
+ # Given the number of free dimensions, determine whether the
+ # mapping needs to be reversed.
+ src_free = set(src_dim_coverage.dims_free) & set(
+ src_aux_coverage.dims_free
+ )
+ tgt_free = set(tgt_dim_coverage.dims_free) & set(
+ tgt_aux_coverage.dims_free
+ )
+ free_flip = len(tgt_free) > len(src_free)
+
+ # Reverse the mapping direction.
+ if broadcast_flip or free_flip:
+ flip_mapping = {
+ tgt_dim: src_dim for src_dim, tgt_dim in self.mapping.items()
+ }
+ self.map_rhs_to_lhs = not self.map_rhs_to_lhs
+ dmsg = (
+ f"reversing the mapping from {self.mapping} to {flip_mapping}, "
+ f"now map_rhs_to_lhs={self.map_rhs_to_lhs}"
+ )
+ logger.debug(dmsg)
+ self.mapping = flip_mapping
+ # The cubes now need to be transposed/reshaped into compatible
+ # broadcast cubes again, as reversing the mapping direction is not
+ # necessarily commutative.
+ self._as_compatible_cubes()
+
+ def _metadata_prepare(self):
+ # Initialise the state.
+ self.prepared_category = _CategoryItems(
+ items_dim=[], items_aux=[], items_scalar=[]
+ )
+ self.prepared_factories = []
+
+ # Map RHS cube to LHS cube, or smaller to larger cube rank.
+ if self.map_rhs_to_lhs:
+ src_cube = self.rhs_cube
+ src_category_local = self.rhs_cube_category_local
+ src_dim_coverage = self.rhs_cube_dim_coverage
+ src_aux_coverage = self.rhs_cube_aux_coverage
+ tgt_cube = self.lhs_cube
+ tgt_category_local = self.lhs_cube_category_local
+ tgt_dim_coverage = self.lhs_cube_dim_coverage
+ tgt_aux_coverage = self.lhs_cube_aux_coverage
+ else:
+ src_cube = self.lhs_cube
+ src_category_local = self.lhs_cube_category_local
+ src_dim_coverage = self.lhs_cube_dim_coverage
+ src_aux_coverage = self.lhs_cube_aux_coverage
+ tgt_cube = self.rhs_cube
+ tgt_category_local = self.rhs_cube_category_local
+ tgt_dim_coverage = self.rhs_cube_dim_coverage
+ tgt_aux_coverage = self.rhs_cube_aux_coverage
+
+ # Determine the resultant cube dim coordinate/s.
+ self._prepare_common_dim_payload(src_dim_coverage, tgt_dim_coverage)
+
+ # Determine the resultant cube aux coordinate/s.
+ self._prepare_common_aux_payload(
+ src_aux_coverage.common_items_aux, # input
+ tgt_aux_coverage.common_items_aux, # input
+ self.prepared_category.items_aux, # output
+ )
+
+ # Determine the resultant cube scalar coordinate/s.
+ self._prepare_common_aux_payload(
+ src_aux_coverage.common_items_scalar, # input
+ tgt_aux_coverage.common_items_scalar, # input
+ self.prepared_category.items_scalar, # output
+ ignore_mismatch=True,
+ )
+
+ self._prepare_local_payload(
+ src_dim_coverage,
+ src_aux_coverage,
+ tgt_dim_coverage,
+ tgt_aux_coverage,
+ )
+
+ self._prepare_factory_payload(
+ tgt_cube, tgt_category_local, from_src=False
+ )
+ self._prepare_factory_payload(src_cube, src_category_local)
+
+ def _metadata_resolve(self):
+ """
+ Categorise the coordinate metadata of the cubes into three distinct
+ groups: metadata from coordinates only available (local) on the LHS
+ cube, metadata from coordinates only available (local) on the RHS
+ cube, and metadata from coordinates common to both the LHS and RHS
+ cubes.
+
+ This is only applicable to coordinates that are members of the
+ 'aux_coords' or 'dim_coords' of the participating cubes.
+
+ """
+
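+ # For example, a "time" coordinate present on both cubes is common,
+ # whereas a hypothetical "forecast_period" coordinate found only on
+ # the LHS cube is local to the LHS.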
+ # Determine the cube dim, aux and scalar coordinate items
+ # for each individual cube.
+ self.lhs_cube_category = self._categorise_items(self.lhs_cube)
+ self.rhs_cube_category = self._categorise_items(self.rhs_cube)
+
+ def _categorise(
+ lhs_items,
+ rhs_items,
+ lhs_local_items,
+ rhs_local_items,
+ common_items,
+ ):
+ rhs_items_metadata = [item.metadata for item in rhs_items]
+ # Track common metadata here as a temporary convenience.
+ common_metadata = []
+
+ # Determine items local to the lhs, and shared items
+ # common to both lhs and rhs.
+ for item in lhs_items:
+ metadata = item.metadata
+ if metadata in rhs_items_metadata:
+ # The metadata is common between lhs and rhs.
+ if metadata not in common_metadata:
+ common_items.append(item)
+ common_metadata.append(metadata)
+ else:
+ # The metadata is local to the lhs.
+ lhs_local_items.append(item)
+
+ # Determine items local to the rhs.
+ for item in rhs_items:
+ if item.metadata not in common_metadata:
+ rhs_local_items.append(item)
+
+ # Determine local and common dim category items.
+ _categorise(
+ self.lhs_cube_category.items_dim, # input
+ self.rhs_cube_category.items_dim, # input
+ self.lhs_cube_category_local.items_dim, # output
+ self.rhs_cube_category_local.items_dim, # output
+ self.category_common.items_dim, # output
+ )
+
+ # Determine local and common aux category items.
+ _categorise(
+ self.lhs_cube_category.items_aux, # input
+ self.rhs_cube_category.items_aux, # input
+ self.lhs_cube_category_local.items_aux, # output
+ self.rhs_cube_category_local.items_aux, # output
+ self.category_common.items_aux, # output
+ )
+
+ # Determine local and common scalar category items.
+ _categorise(
+ self.lhs_cube_category.items_scalar, # input
+ self.rhs_cube_category.items_scalar, # input
+ self.lhs_cube_category_local.items_scalar, # output
+ self.rhs_cube_category_local.items_scalar, # output
+ self.category_common.items_scalar, # output
+ )
+
+ # Sort the resultant categories by metadata name for consistency,
+ # in-place.
+ categories = (
+ self.lhs_cube_category,
+ self.rhs_cube_category,
+ self.lhs_cube_category_local,
+ self.rhs_cube_category_local,
+ self.category_common,
+ )
+ def key_func(item):
+ return item.metadata.name()
+
+ for category in categories:
+ category.items_dim.sort(key=key_func)
+ category.items_aux.sort(key=key_func)
+ category.items_scalar.sort(key=key_func)
+
+ def _prepare_common_aux_payload(
+ self,
+ src_common_items,
+ tgt_common_items,
+ prepared_items,
+ ignore_mismatch=None,
+ ):
+ from iris.coords import AuxCoord
+
+ if ignore_mismatch is None:
+ # Configure ability to ignore coordinate points/bounds
+ # mismatches between common items.
+ ignore_mismatch = False
+
+ for src_item in src_common_items:
+ src_metadata = src_item.metadata
+ tgt_items = tuple(
+ filter(
+ lambda tgt_item: tgt_item.metadata == src_metadata,
+ tgt_common_items,
+ )
+ )
+ if not tgt_items:
+ dmsg = (
+ f"ignoring src {self._src_cube_position} cube aux coordinate "
+ f"{src_metadata}, does not match any common tgt "
+ f"{self._tgt_cube_position} cube aux coordinate metadata"
+ )
+ logger.debug(dmsg)
+ elif len(tgt_items) > 1:
+ dmsg = (
+ f"ignoring src {self._src_cube_position} cube aux coordinate "
+ f"{src_metadata}, matches multiple [{len(tgt_items)}] common "
+ f"tgt {self._tgt_cube_position} cube aux coordinate metadata"
+ )
+ logger.debug(dmsg)
+ else:
+ (tgt_item,) = tgt_items
+ src_coord = src_item.coord
+ tgt_coord = tgt_item.coord
+ points, bounds = self._prepare_points_and_bounds(
+ src_coord,
+ tgt_coord,
+ src_item.dims,
+ tgt_item.dims,
+ ignore_mismatch=ignore_mismatch,
+ )
+ if points is not None:
+ src_type = type(src_coord)
+ tgt_type = type(tgt_coord)
+ # Downcast to aux if there are mixed container types.
+ container = src_type if src_type is tgt_type else AuxCoord
+ prepared_metadata = _PreparedMetadata(
+ combined=src_metadata.combine(tgt_item.metadata),
+ src=src_metadata,
+ tgt=tgt_item.metadata,
+ )
+ prepared_item = _PreparedItem(
+ metadata=prepared_metadata,
+ points=points.copy(),
+ bounds=bounds if bounds is None else bounds.copy(),
+ dims=tgt_item.dims,
+ container=container,
+ )
+ prepared_items.append(prepared_item)
+
+ def _prepare_common_dim_payload(
+ self, src_coverage, tgt_coverage, ignore_mismatch=None
+ ):
+ from iris.coords import DimCoord
+
+ if ignore_mismatch is None:
+ # Configure ability to ignore coordinate points/bounds
+ # mismatches between common items.
+ ignore_mismatch = False
+
+ for src_dim in src_coverage.dims_common:
+ src_metadata = src_coverage.metadata[src_dim]
+ src_coord = src_coverage.coords[src_dim]
+
+ tgt_dim = self.mapping[src_dim]
+ tgt_metadata = tgt_coverage.metadata[tgt_dim]
+ tgt_coord = tgt_coverage.coords[tgt_dim]
+
+ points, bounds = self._prepare_points_and_bounds(
+ src_coord,
+ tgt_coord,
+ src_dim,
+ tgt_dim,
+ ignore_mismatch=ignore_mismatch,
+ )
+
+ if points is not None:
+ prepared_metadata = _PreparedMetadata(
+ combined=src_metadata.combine(tgt_metadata),
+ src=src_metadata,
+ tgt=tgt_metadata,
+ )
+ prepared_item = _PreparedItem(
+ metadata=prepared_metadata,
+ points=points.copy(),
+ bounds=bounds if bounds is None else bounds.copy(),
+ dims=(tgt_dim,),
+ container=DimCoord,
+ )
+ self.prepared_category.items_dim.append(prepared_item)
+
+ def _prepare_factory_payload(self, cube, category_local, from_src=True):
+ def _get_prepared_item(metadata, from_src=True, from_local=False):
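+ # Search the prepared (or local) category items for a coordinate
+ # whose metadata matches that of the given factory dependency; a
+ # local match is promoted into the prepared category so that the
+ # factory can later be reconstructed from it.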
+ result = None
+ if from_local:
+ category = category_local
+ match = lambda item: item.metadata == metadata
+ else:
+ category = self.prepared_category
+ if from_src:
+ match = lambda item: item.metadata.src == metadata
+ else:
+ match = lambda item: item.metadata.tgt == metadata
+ for member in category._fields:
+ category_items = getattr(category, member)
+ matched_items = tuple(filter(match, category_items))
+ if matched_items:
+ if len(matched_items) > 1:
+ dmsg = (
+ f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} "
+ f"{'local' if from_local else 'prepared'} metadata matches"
+ )
+ logger.debug(dmsg)
+ else:
+ (item,) = matched_items
+ if from_local:
+ src = tgt = None
+ if from_src:
+ src = item.metadata
+ dims = tuple(
+ [self.mapping[dim] for dim in item.dims]
+ )
+ else:
+ tgt = item.metadata
+ dims = item.dims
+ result = self._create_prepared_item(
+ item.coord, dims, src=src, tgt=tgt
+ )
+ getattr(self.prepared_category, member).append(
+ result
+ )
+ else:
+ result = item
+ break
+ return result
+
+ for factory in cube.aux_factories:
+ container = type(factory)
+ dependencies = {}
+ prepared_item = None
+
+ if tuple(
+ filter(
+ lambda item: item.container is container,
+ self.prepared_factories,
+ )
+ ):
+ # Skip, as an equivalent factory has already been prepared.
+ dmsg = (
+ f"ignoring {'src' if from_src else 'tgt'} {container}, "
+ f"a similar factory has already been prepared"
+ )
+ logger.debug(dmsg)
+ continue
+
+ for (
+ dependency_name,
+ dependency_coord,
+ ) in factory.dependencies.items():
+ metadata = dependency_coord.metadata
+ prepared_item = _get_prepared_item(metadata, from_src=from_src)
+ if prepared_item is None:
+ prepared_item = _get_prepared_item(
+ metadata, from_src=from_src, from_local=True
+ )
+ if prepared_item is None:
+ dmsg = f"cannot find matching {metadata} for {container} dependency {dependency_name}"
+ logger.debug(dmsg)
+ break
+ dependencies[dependency_name] = prepared_item.metadata
+
+ if prepared_item is not None:
+ prepared_factory = _PreparedFactory(
+ container=container, dependencies=dependencies
+ )
+ self.prepared_factories.append(prepared_factory)
+ else:
+ dmsg = f"ignoring {'src' if from_src else 'tgt'} {container}, cannot find all dependencies"
+ logger.debug(dmsg)
+
+ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage):
+ # Determine whether there are tgt dimensions not mapped to by an
+ # associated src dimension, and thus may be covered by any local
+ # tgt aux coordinates.
+ extra_tgt_dims = set(range(tgt_aux_coverage.cube.ndim)) - set(
+ self.mapping.values()
+ )
+
+ if LENIENT["maths"]:
+ mapped_src_dims = set(self.mapping.keys())
+ mapped_tgt_dims = set(self.mapping.values())
+
+ # Add local src aux coordinates.
+ for item in src_aux_coverage.local_items_aux:
+ if all([dim in mapped_src_dims for dim in item.dims]):
+ tgt_dims = tuple([self.mapping[dim] for dim in item.dims])
+ prepared_item = self._create_prepared_item(
+ item.coord, tgt_dims, src=item.metadata
+ )
+ self.prepared_category.items_aux.append(prepared_item)
+ else:
+ dmsg = (
+ f"ignoring local src {self._src_cube_position} cube "
+ f"aux coordinate {item.metadata}, as not all src "
+ f"dimensions {item.dims} are mapped"
+ )
+ logger.debug(dmsg)
+ else:
+ # For strict maths, only local tgt aux coordinates covering
+ # the extra dimensions of the tgt cube may be added.
+ mapped_tgt_dims = set()
+
+ # Add local tgt aux coordinates.
+ for item in tgt_aux_coverage.local_items_aux:
+ tgt_dims = item.dims
+ if all([dim in mapped_tgt_dims for dim in tgt_dims]) or any(
+ [dim in extra_tgt_dims for dim in tgt_dims]
+ ):
+ prepared_item = self._create_prepared_item(
+ item.coord, tgt_dims, tgt=item.metadata
+ )
+ self.prepared_category.items_aux.append(prepared_item)
+ else:
+ dmsg = (
+ f"ignoring local tgt {self._tgt_cube_position} cube "
+ f"aux coordinate {item.metadata}, as not all tgt "
+ f"dimensions {tgt_dims} are mapped"
+ )
+ logger.debug(dmsg)
+
+ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage):
+ mapped_tgt_dims = self.mapping.values()
+
+ # Determine whether there are tgt dimensions not mapped to by an
+ # associated src dimension, and thus may be covered by any local
+ # tgt dim coordinates.
+ extra_tgt_dims = set(range(tgt_dim_coverage.cube.ndim)) - set(
+ mapped_tgt_dims
+ )
+
+ if LENIENT["maths"]:
+ tgt_dims_conflict = set()
+
+ # Add local src dim coordinates.
+ for src_dim in src_dim_coverage.dims_local:
+ tgt_dim = self.mapping[src_dim]
+ # Only add the local src dim coordinate if there is no
+ # associated local tgt dim coordinate.
+ if tgt_dim not in tgt_dim_coverage.dims_local:
+ metadata = src_dim_coverage.metadata[src_dim]
+ coord = src_dim_coverage.coords[src_dim]
+ prepared_item = self._create_prepared_item(
+ coord, tgt_dim, src=metadata
+ )
+ self.prepared_category.items_dim.append(prepared_item)
+ else:
+ tgt_dims_conflict.add(tgt_dim)
+ if self._debug:
+ src_metadata = src_dim_coverage.metadata[src_dim]
+ tgt_metadata = tgt_dim_coverage.metadata[tgt_dim]
+ dmsg = (
+ f"ignoring local src {self._src_cube_position} cube "
+ f"dim coordinate {src_metadata}, as conflicts with "
+ f"tgt {self._tgt_cube_position} cube dim coordinate "
+ f"{tgt_metadata}, mapping ({src_dim},)->({tgt_dim},)"
+ )
+ logger.debug(dmsg)
+
+ # Determine whether there are any tgt dims free to be mapped
+ # by an available local tgt dim coordinate.
+ tgt_dims_unmapped = (
+ set(tgt_dim_coverage.dims_local) - tgt_dims_conflict
+ )
+ else:
+ # For strict maths, only local tgt dim coordinates covering
+ # the extra dimensions of the tgt cube may be added.
+ tgt_dims_unmapped = extra_tgt_dims
+
+ # Add local tgt dim coordinates.
+ for tgt_dim in tgt_dims_unmapped:
+ if tgt_dim in mapped_tgt_dims or tgt_dim in extra_tgt_dims:
+ metadata = tgt_dim_coverage.metadata[tgt_dim]
+ if metadata is not None:
+ coord = tgt_dim_coverage.coords[tgt_dim]
+ prepared_item = self._create_prepared_item(
+ coord, tgt_dim, tgt=metadata
+ )
+ self.prepared_category.items_dim.append(prepared_item)
+
+ def _prepare_local_payload_scalar(
+ self, src_aux_coverage, tgt_aux_coverage
+ ):
+ # For strict maths only, add all local tgt scalar coordinates
+ # when the src cube is a scalar cube with no local src scalar
+ # coordinates.
+ src_scalar_cube = (
+ not LENIENT["maths"]
+ and src_aux_coverage.cube.ndim == 0
+ and len(src_aux_coverage.local_items_scalar) == 0
+ )
+
+ if src_scalar_cube or LENIENT["maths"]:
+ # Add any local src scalar coordinates, if available.
+ for item in src_aux_coverage.local_items_scalar:
+ prepared_item = self._create_prepared_item(
+ item.coord, item.dims, src=item.metadata
+ )
+ self.prepared_category.items_scalar.append(prepared_item)
+
+ # Add any local tgt scalar coordinates, if available.
+ for item in tgt_aux_coverage.local_items_scalar:
+ prepared_item = self._create_prepared_item(
+ item.coord, item.dims, tgt=item.metadata
+ )
+ self.prepared_category.items_scalar.append(prepared_item)
+
+ def _prepare_local_payload(
+ self,
+ src_dim_coverage,
+ src_aux_coverage,
+ tgt_dim_coverage,
+ tgt_aux_coverage,
+ ):
+ # Add local src/tgt dim coordinates.
+ self._prepare_local_payload_dim(src_dim_coverage, tgt_dim_coverage)
+
+ # Add local src/tgt aux coordinates.
+ self._prepare_local_payload_aux(src_aux_coverage, tgt_aux_coverage)
+
+ # Add local src/tgt scalar coordinates.
+ self._prepare_local_payload_scalar(src_aux_coverage, tgt_aux_coverage)
+
+ def _prepare_points_and_bounds(
+ self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None
+ ):
+ from iris.util import array_equal
+
+ if ignore_mismatch is None:
+ # Configure ability to ignore coordinate points/bounds
+ # mismatches between common items.
+ ignore_mismatch = False
+
+ points, bounds = None, None
+
+ if not isinstance(src_dims, Iterable):
+ src_dims = (src_dims,)
+
+ if not isinstance(tgt_dims, Iterable):
+ tgt_dims = (tgt_dims,)
+
+ # Deal with coordinates that have been sliced.
+ if src_coord.ndim != tgt_coord.ndim:
+ if tgt_coord.ndim > src_coord.ndim:
+ # Use the tgt coordinate points/bounds.
+ points = tgt_coord.points
+ bounds = tgt_coord.bounds
+ else:
+ # Use the src coordinate points/bounds.
+ points = src_coord.points
+ bounds = src_coord.bounds
+
+ # Deal with coordinates spanning broadcast dimensions.
+ if (
+ points is None
+ and bounds is None
+ and src_coord.shape != tgt_coord.shape
+ ):
+ # Check whether the src coordinate is broadcasting.
+ dims = tuple([self.mapping[dim] for dim in src_dims])
+ src_shape_broadcast = tuple([self.shape[dim] for dim in dims])
+ src_cube_shape = self._src_cube.shape
+ src_shape = tuple([src_cube_shape[dim] for dim in src_dims])
+ src_broadcasting = src_shape != src_shape_broadcast
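+ # i.e. the src coordinate is broadcasting when the extent of its
+ # cube dimensions differs from the corresponding extent in the
+ # resolved broadcast shape (typically extent 1 versus extent N).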
+
+ # Check whether the tgt coordinate is broadcasting.
+ tgt_shape_broadcast = tuple([self.shape[dim] for dim in tgt_dims])
+ tgt_cube_shape = self._tgt_cube.shape
+ tgt_shape = tuple([tgt_cube_shape[dim] for dim in tgt_dims])
+ tgt_broadcasting = tgt_shape != tgt_shape_broadcast
+
+ if src_broadcasting and tgt_broadcasting:
+ emsg = (
+ f"Cannot broadcast the coordinate {src_coord.name()!r} on "
+ f"{self._src_cube_position} cube {self._src_cube.name()!r} and "
+ f"coordinate {tgt_coord.name()!r} on "
+ f"{self._tgt_cube_position} cube {self._tgt_cube.name()!r} to "
+ f"broadcast shape {tgt_shape_broadcast}."
+ )
+ raise ValueError(emsg)
+ elif src_broadcasting:
+ # Use the tgt coordinate points/bounds.
+ points = tgt_coord.points
+ bounds = tgt_coord.bounds
+ elif tgt_broadcasting:
+ # Use the src coordinate points/bounds.
+ points = src_coord.points
+ bounds = src_coord.bounds
+
+ if points is None and bounds is None:
+ # Note that this also ensures shape equality.
+ eq_points = array_equal(
+ src_coord.points, tgt_coord.points, withnans=True
+ )
+ if eq_points:
+ points = src_coord.points
+ src_has_bounds = src_coord.has_bounds()
+ tgt_has_bounds = tgt_coord.has_bounds()
+
+ if src_has_bounds and tgt_has_bounds:
+ src_bounds = src_coord.bounds
+ eq_bounds = array_equal(
+ src_bounds, tgt_coord.bounds, withnans=True
+ )
+
+ if eq_bounds:
+ bounds = src_bounds
+ else:
+ if LENIENT["maths"] and ignore_mismatch:
+ # For lenient, ignore a coordinate with mismatched bounds.
+ dmsg = (
+ f"ignoring src {self._src_cube_position} cube "
+ f"{src_coord.metadata}, unequal bounds with "
+ f"tgt {self._tgt_cube_position} cube, "
+ f"{src_dims}->{tgt_dims}"
+ )
+ logger.debug(dmsg)
+ else:
+ emsg = (
+ f"Coordinate {src_coord.name()!r} has different bounds for the "
+ f"LHS cube {self.lhs_cube.name()!r} and "
+ f"RHS cube {self.rhs_cube.name()!r}."
+ )
+ raise ValueError(emsg)
+ else:
+ # For lenient, use either of the coordinate bounds, if they exist.
+ if LENIENT["maths"]:
+ if src_has_bounds:
+ dmsg = (
+ f"using src {self._src_cube_position} cube "
+ f"{src_coord.metadata} bounds, tgt has no bounds"
+ )
+ logger.debug(dmsg)
+ bounds = src_coord.bounds
+ else:
+ dmsg = (
+ f"using tgt {self._tgt_cube_position} cube "
+ f"{tgt_coord.metadata} bounds, src has no bounds"
+ )
+ logger.debug(dmsg)
+ bounds = tgt_coord.bounds
+ else:
+ # For strict, both coordinates must have bounds, or both
+ # coordinates must not have bounds.
+ if src_has_bounds:
+ emsg = (
+ f"Coordinate {src_coord.name()!r} has bounds for the "
+ f"{self._src_cube_position} cube {self._src_cube.name()!r}, "
+ f"but not the {self._tgt_cube_position} cube {self._tgt_cube.name()!r}."
+ )
+ raise ValueError(emsg)
+ if tgt_has_bounds:
+ emsg = (
+ f"Coordinate {tgt_coord.name()!r} has bounds for the "
+ f"{self._tgt_cube_position} cube {self._tgt_cube.name()!r}, "
+ f"but not the {self._src_cube_position} cube {self._src_cube.name()!r}."
+ )
+ raise ValueError(emsg)
+ else:
+ if LENIENT["maths"] and ignore_mismatch:
+ # For lenient, ignore a coordinate with mismatched points.
+ dmsg = (
+ f"ignoring src {self._src_cube_position} cube "
+ f"{src_coord.metadata}, unequal points with tgt "
+ f"{src_dims}->{tgt_dims}"
+ )
+ logger.debug(dmsg)
+ else:
+ emsg = (
+ f"Coordinate {src_coord.name()!r} has different points for the "
+ f"LHS cube {self.lhs_cube.name()!r} and "
+ f"RHS cube {self.rhs_cube.name()!r}."
+ )
+ raise ValueError(emsg)
+
+ return points, bounds
+
+ @property
+ def _src_cube(self):
+ if self.map_rhs_to_lhs:
+ result = self.rhs_cube
+ else:
+ result = self.lhs_cube
+ return result
+
+ @property
+ def _src_cube_position(self):
+ if self.map_rhs_to_lhs:
+ result = "RHS"
+ else:
+ result = "LHS"
+ return result
+
+ @property
+ def _src_cube_resolved(self):
+ if self.map_rhs_to_lhs:
+ result = self.rhs_cube_resolved
+ else:
+ result = self.lhs_cube_resolved
+ return result
+
+ @_src_cube_resolved.setter
+ def _src_cube_resolved(self, cube):
+ if self.map_rhs_to_lhs:
+ self.rhs_cube_resolved = cube
+ else:
+ self.lhs_cube_resolved = cube
+
+ @property
+ def _tgt_cube(self):
+ if self.map_rhs_to_lhs:
+ result = self.lhs_cube
+ else:
+ result = self.rhs_cube
+ return result
+
+ @property
+ def _tgt_cube_position(self):
+ if self.map_rhs_to_lhs:
+ result = "LHS"
+ else:
+ result = "RHS"
+ return result
+
+ @property
+ def _tgt_cube_resolved(self):
+ if self.map_rhs_to_lhs:
+ result = self.lhs_cube_resolved
+ else:
+ result = self.rhs_cube_resolved
+ return result
+
+ @_tgt_cube_resolved.setter
+ def _tgt_cube_resolved(self, cube):
+ if self.map_rhs_to_lhs:
+ self.lhs_cube_resolved = cube
+ else:
+ self.rhs_cube_resolved = cube
+
+ def _tgt_cube_prepare(self, data):
+ cube = self._tgt_cube
+
+ # Replace existing tgt cube data with the provided data.
+ cube.data = data
+
+ # Clear the aux factories.
+ for factory in cube.aux_factories:
+ cube.remove_aux_factory(factory)
+
+ # Clear the cube coordinates.
+ for coord in cube.coords():
+ cube.remove_coord(coord)
+
+ # Clear the cube cell measures.
+ for cm in cube.cell_measures():
+ cube.remove_cell_measure(cm)
+
+ # Clear the ancillary variables.
+ for av in cube.ancillary_variables():
+ cube.remove_ancillary_variable(av)
+
+ def cube(self, data, in_place=False):
+ from iris.cube import Cube
+
+ expected_shape = self.shape
+
+ # Ensure that we have been provided with candidate cubes, that they
+ # have been resolved, and that their metadata has been prepared,
+ # ready for creating the resultant resolved cube.
+ if expected_shape is None:
+ emsg = (
+ "Cannot resolve resultant cube, as no candidate cubes have "
+ "been provided."
+ )
+ raise ValueError(emsg)
+
+ if not hasattr(data, "shape"):
+ data = np.asanyarray(data)
+
+ # Ensure that the shape of the provided data is the expected
+ # shape of the resultant resolved cube.
+ if data.shape != expected_shape:
+ emsg = (
+ "Cannot resolve resultant cube, as the provided data must "
+ f"have shape {expected_shape}, got data shape {data.shape}."
+ )
+ raise ValueError(emsg)
+
+ if in_place:
+ result = self._tgt_cube
+
+ if result.shape != expected_shape:
+ emsg = (
+ "Cannot resolve resultant cube in-place, as the "
+ f"{self._tgt_cube_position} tgt cube {result.name()!r} "
+ f"requires data with shape {result.shape}, got data "
+ f"shape {data.shape}. Suggest not performing this "
+ "operation in-place."
+ )
+ raise ValueError(emsg)
+
+ # Prepare target cube for in-place population with the prepared
+ # metadata content and the provided data.
+ self._tgt_cube_prepare(data)
+ else:
+ # Create the resultant resolved cube with provided data.
+ result = Cube(data)
+
+ # Add the combined cube metadata from both the candidate cubes.
+ result.metadata = self.lhs_cube.metadata.combine(
+ self.rhs_cube.metadata
+ )
+
+ # Add the prepared dim coordinates.
+ for item in self.prepared_category.items_dim:
+ coord = item.container(item.points, bounds=item.bounds)
+ coord.metadata = item.metadata.combined
+ result.add_dim_coord(coord, item.dims)
+
+ # Add the prepared aux and scalar coordinates.
+ prepared_aux_coords = (
+ self.prepared_category.items_aux
+ + self.prepared_category.items_scalar
+ )
+ for item in prepared_aux_coords:
+ coord = item.container(item.points, bounds=item.bounds)
+ coord.metadata = item.metadata.combined
+ try:
+ result.add_aux_coord(coord, item.dims)
+ except ValueError as err:
+ scalar = dims = ""
+ if item.dims:
+ plural = "s" if len(item.dims) > 1 else ""
+ dims = f" with tgt dim{plural} {item.dims}"
+ else:
+ scalar = "scalar "
+ dmsg = (
+ f"ignoring prepared {scalar}coordinate "
+ f"{coord.metadata}{dims}, got {err!r}"
+ )
+ logger.debug(dmsg)
+
+ # Add the prepared aux factories.
+ for prepared_factory in self.prepared_factories:
+ dependencies = dict()
+ for (
+ dependency_name,
+ prepared_metadata,
+ ) in prepared_factory.dependencies.items():
+ coord = result.coord(prepared_metadata.combined)
+ dependencies[dependency_name] = coord
+ factory = prepared_factory.container(**dependencies)
+ result.add_aux_factory(factory)
+
+ return result
+
+ @property
+ def mapped(self):
+ """
+ Returns whether all src cube dimensions have been associated with
+ relevant tgt cube dimensions.
+
+ """
+ return self._src_cube.ndim == len(self.mapping)
+
+ @property
+ def shape(self):
+ """Returns the shape of the resultant resolved cube."""
+ return getattr(self, "_broadcast_shape", None)
diff --git a/lib/iris/config.py b/lib/iris/config.py
index e1d7dee29d..eeef1873f9 100644
--- a/lib/iris/config.py
+++ b/lib/iris/config.py
@@ -32,8 +32,11 @@
import configparser
import contextlib
+import logging.config
import os.path
+import pathlib
import warnings
+import yaml
# Returns simple string options
@@ -81,6 +84,14 @@ def get_dir_option(section, option, default=None):
config = configparser.ConfigParser()
config.read([os.path.join(CONFIG_PATH, "site.cfg")])
+# Configure logging.
+fname_logging = pathlib.Path(CONFIG_PATH) / "logging.yaml"
+if not fname_logging.exists():
+ emsg = f"Logging configuration file '{fname_logging!s}' does not exist."
+ raise FileNotFoundError(emsg)
+with open(fname_logging) as fi:
+ logging.config.dictConfig(yaml.safe_load(fi))
+del fname_logging
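+# A minimal "logging.yaml" might look like the following (illustrative
+# sketch only, not the shipped configuration):
+#
+#   version: 1
+#   loggers:
+#     iris:
+#       level: DEBUG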
##################
# Resource options
diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py
index cc41b27b34..812dfae23e 100644
--- a/lib/iris/coord_systems.py
+++ b/lib/iris/coord_systems.py
@@ -126,12 +126,13 @@ def __init__(
radius.
If just two of semi_major_axis, semi_minor_axis, and
- inverse_flattening are given the missing element is calulated from the
+ inverse_flattening are given, the missing element is calculated from the
formula:
:math:`flattening = (major - minor) / major`
Currently, Iris will not allow over-specification (all three ellipsoid
- paramaters).
+ parameters).
+
Examples::
cs = GeogCS(6371229)
diff --git a/lib/iris/coords.py b/lib/iris/coords.py
index b5392579c8..d2dcd35c92 100644
--- a/lib/iris/coords.py
+++ b/lib/iris/coords.py
@@ -25,13 +25,19 @@
from iris._data_manager import DataManager
import iris._lazy_data as _lazy
import iris.aux_factory
+from iris.common import (
+ AncillaryVariableMetadata,
+ BaseMetadata,
+ CFVariableMixin,
+ CellMeasureMetadata,
+ CoordMetadata,
+ DimCoordMetadata,
+ metadata_manager_factory,
+)
import iris.exceptions
import iris.time
import iris.util
-from iris._cube_coord_common import CFVariableMixin
-from iris.util import points_step
-
class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta):
"""
@@ -59,7 +65,7 @@ def __init__(
standard_name=None,
long_name=None,
var_name=None,
- units="no-unit",
+ units=None,
attributes=None,
):
"""
@@ -92,6 +98,10 @@ def __init__(
# its __init__ or __copy__ methods. The only bounds-related behaviour
# it provides is a 'has_bounds()' method, which always returns False.
+ # Configure the metadata manager.
+ if not hasattr(self, "_metadata_manager"):
+ self._metadata_manager = metadata_manager_factory(BaseMetadata)
+
#: CF standard name of the quantity that the metadata represents.
self.standard_name = standard_name
@@ -160,7 +170,7 @@ def copy(self, values=None):
* values
An array of values for the new dimensional metadata object.
- This may be a different shape to the orginal values array being
+ This may be a different shape to the original values array being
copied.
"""
@@ -340,9 +350,9 @@ def __eq__(self, other):
# If the other object has a means of getting its definition, then do
# the comparison, otherwise return a NotImplemented to let Python try
# to resolve the operator elsewhere.
- if hasattr(other, "_as_defn"):
+ if hasattr(other, "metadata"):
# metadata comparison
- eq = self._as_defn() == other._as_defn()
+ eq = self.metadata == other.metadata
# data values comparison
if eq and eq is not NotImplemented:
eq = iris.util.array_equal(
@@ -367,17 +377,6 @@ def __ne__(self, other):
result = not result
return result
- def _as_defn(self):
- defn = _DMDefn(
- self.standard_name,
- self.long_name,
- self.var_name,
- self.units,
- self.attributes,
- )
-
- return defn
-
# Must supply __hash__ as Python 3 does not enable it if __eq__ is defined.
# NOTE: Violates "objects which compare equal must have the same hash".
# We ought to remove this, as equality of two dimensional metadata can
@@ -688,7 +687,7 @@ def __init__(
standard_name=None,
long_name=None,
var_name=None,
- units="no-unit",
+ units=None,
attributes=None,
):
"""
@@ -714,6 +713,12 @@ def __init__(
A dictionary containing other cf and user-defined attributes.
"""
+ # Configure the metadata manager.
+ if not hasattr(self, "_metadata_manager"):
+ self._metadata_manager = metadata_manager_factory(
+ AncillaryVariableMetadata
+ )
+
super().__init__(
values=data,
standard_name=standard_name,
@@ -788,7 +793,7 @@ def __init__(
standard_name=None,
long_name=None,
var_name=None,
- units="1",
+ units=None,
attributes=None,
measure=None,
):
@@ -821,6 +826,9 @@ def __init__(
'area' and 'volume'. The default is 'area'.
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CellMeasureMetadata)
+
super().__init__(
data=data,
standard_name=standard_name,
@@ -838,14 +846,14 @@ def __init__(
@property
def measure(self):
- return self._measure
+ return self._metadata_manager.measure
@measure.setter
def measure(self, measure):
if measure not in ["area", "volume"]:
emsg = f"measure must be 'area' or 'volume', got {measure!r}"
raise ValueError(emsg)
- self._measure = measure
+ self._metadata_manager.measure = measure
def __str__(self):
result = repr(self)
@@ -864,17 +872,6 @@ def __repr__(self):
)
return result
- def _as_defn(self):
- defn = CellMeasureDefn(
- self.standard_name,
- self.long_name,
- self.var_name,
- self.units,
- self.attributes,
- self.measure,
- )
- return defn
-
def cube_dims(self, cube):
"""
Return the cube dimensions of this CellMeasure.
@@ -895,160 +892,6 @@ def xml_element(self, doc):
return element
-class CoordDefn(
- namedtuple(
- "CoordDefn",
- [
- "standard_name",
- "long_name",
- "var_name",
- "units",
- "attributes",
- "coord_system",
- "climatological",
- ],
- )
-):
- """
- Criterion for identifying a specific type of :class:`DimCoord` or
- :class:`AuxCoord` based on its metadata.
-
- """
-
- __slots__ = ()
-
- def name(self, default="unknown"):
- """
- Returns a human-readable name.
-
- First it tries self.standard_name, then it tries the 'long_name'
- attribute, then the 'var_name' attribute, before falling back to
- the value of `default` (which itself defaults to 'unknown').
-
- """
- return self.standard_name or self.long_name or self.var_name or default
-
- def __lt__(self, other):
- if not isinstance(other, CoordDefn):
- return NotImplemented
-
- def _sort_key(defn):
- # Emulate Python 2 behaviour with None
- return (
- defn.standard_name is not None,
- defn.standard_name,
- defn.long_name is not None,
- defn.long_name,
- defn.var_name is not None,
- defn.var_name,
- defn.units is not None,
- defn.units,
- defn.coord_system is not None,
- defn.coord_system,
- )
-
- return _sort_key(self) < _sort_key(other)
-
-
-class CellMeasureDefn(
- namedtuple(
- "CellMeasureDefn",
- [
- "standard_name",
- "long_name",
- "var_name",
- "units",
- "attributes",
- "measure",
- ],
- )
-):
- """
- Criterion for identifying a specific type of :class:`CellMeasure`
- based on its metadata.
-
- """
-
- __slots__ = ()
-
- def name(self, default="unknown"):
- """
- Returns a human-readable name.
-
- First it tries self.standard_name, then it tries the 'long_name'
- attribute, then the 'var_name' attribute, before falling back to
- the value of `default` (which itself defaults to 'unknown').
-
- """
- return self.standard_name or self.long_name or self.var_name or default
-
- def __lt__(self, other):
- if not isinstance(other, CellMeasureDefn):
- return NotImplemented
-
- def _sort_key(defn):
- # Emulate Python 2 behaviour with None
- return (
- defn.standard_name is not None,
- defn.standard_name,
- defn.long_name is not None,
- defn.long_name,
- defn.var_name is not None,
- defn.var_name,
- defn.units is not None,
- defn.units,
- defn.measure is not None,
- defn.measure,
- )
-
- return _sort_key(self) < _sort_key(other)
-
-
-class _DMDefn(
- namedtuple(
- "DMDefn",
- ["standard_name", "long_name", "var_name", "units", "attributes",],
- )
-):
- """
- Criterion for identifying a specific type of :class:`_DimensionalMetadata`
- based on its metadata.
-
- """
-
- __slots__ = ()
-
- def name(self, default="unknown"):
- """
- Returns a human-readable name.
-
- First it tries self.standard_name, then it tries the 'long_name'
- attribute, then the 'var_name' attribute, before falling back to
- the value of `default` (which itself defaults to 'unknown').
-
- """
- return self.standard_name or self.long_name or self.var_name or default
-
- def __lt__(self, other):
- if not isinstance(other, _DMDefn):
- return NotImplemented
-
- def _sort_key(defn):
- # Emulate Python 2 behaviour with None
- return (
- defn.standard_name is not None,
- defn.standard_name,
- defn.long_name is not None,
- defn.long_name,
- defn.var_name is not None,
- defn.var_name,
- defn.units is not None,
- defn.units,
- )
-
- return _sort_key(self) < _sort_key(other)
-
-
class CoordExtent(
namedtuple(
"_CoordExtent",
@@ -1440,7 +1283,7 @@ def __init__(
standard_name=None,
long_name=None,
var_name=None,
- units="1",
+ units=None,
bounds=None,
attributes=None,
coord_system=None,
@@ -1490,7 +1333,12 @@ def __init__(
Will set to True when a climatological time axis is loaded
from NetCDF.
Always False if no bounds exist.
+
"""
+ # Configure the metadata manager.
+ if not hasattr(self, "_metadata_manager"):
+ self._metadata_manager = metadata_manager_factory(CoordMetadata)
+
super().__init__(
values=points,
standard_name=standard_name,
@@ -1589,7 +1437,7 @@ def bounds(self, bounds):
# Ensure the bounds are a compatible shape.
if bounds is None:
self._bounds_dm = None
- self._climatological = False
+ self.climatological = False
else:
bounds = self._sanitise_array(bounds, 2)
if self.shape != bounds.shape[:-1]:
@@ -1605,6 +1453,15 @@ def bounds(self, bounds):
else:
self._bounds_dm.data = bounds
+ @property
+ def coord_system(self):
+ """The coordinate-system of the coordinate."""
+ return self._metadata_manager.coord_system
+
+ @coord_system.setter
+ def coord_system(self, value):
+ self._metadata_manager.coord_system = value
+
@property
def climatological(self):
"""
@@ -1615,8 +1472,13 @@ def climatological(self):
Always reads as False if there are no bounds.
On set, the input value is cast to a boolean, exceptions raised
if units are not time units or if there are no bounds.
+
"""
- return self._climatological if self.has_bounds() else False
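+ # The flag automatically degrades to False whenever it can no longer
+ # be valid: there are no bounds, or the units are not a time
+ # reference.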
+ if not self.has_bounds():
+ self._metadata_manager.climatological = False
+ if not self.units.is_time_reference():
+ self._metadata_manager.climatological = False
+ return self._metadata_manager.climatological
@climatological.setter
def climatological(self, value):
@@ -1634,7 +1496,7 @@ def climatological(self, value):
emsg = "Cannot set climatological coordinate, no bounds exist."
raise ValueError(emsg)
- self._climatological = value
+ self._metadata_manager.climatological = value
def lazy_points(self):
"""
@@ -1722,18 +1584,6 @@ def _repr_other_metadata(self):
result += ", climatological={}".format(self.climatological)
return result
- def _as_defn(self):
- defn = CoordDefn(
- self.standard_name,
- self.long_name,
- self.var_name,
- self.units,
- self.attributes,
- self.coord_system,
- self.climatological,
- )
- return defn
-
# Must supply __hash__ as Python 3 does not enable it if __eq__ is defined.
# NOTE: Violates "objects which compare equal must have the same hash".
# We ought to remove this, as equality of two coords can *change*, so they
@@ -1986,8 +1836,9 @@ def is_compatible(self, other, ignore=None):
Args:
* other:
- An instance of :class:`iris.coords.Coord` or
- :class:`iris.coords.CoordDefn`.
+ An instance of :class:`iris.coords.Coord`,
+ :class:`iris.common.CoordMetadata` or
+ :class:`iris.common.DimCoordMetadata`.
* ignore:
A single attribute key or iterable of attribute keys to ignore when
comparing the coordinates. Default is None. To ignore all
@@ -2411,7 +2262,7 @@ def from_regular(
standard_name=None,
long_name=None,
var_name=None,
- units="1",
+ units=None,
attributes=None,
coord_system=None,
circular=False,
@@ -2442,7 +2293,7 @@ def from_regular(
"""
points = (zeroth + step) + step * np.arange(count, dtype=np.float32)
- _, regular = points_step(points)
+ _, regular = iris.util.points_step(points)
if not regular:
points = (zeroth + step) + step * np.arange(
count, dtype=np.float64
@@ -2474,7 +2325,7 @@ def __init__(
standard_name=None,
long_name=None,
var_name=None,
- units="1",
+ units=None,
bounds=None,
attributes=None,
coord_system=None,
@@ -2486,6 +2337,9 @@ def __init__(
read-only points and bounds.
"""
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(DimCoordMetadata)
+
super().__init__(
points,
standard_name=standard_name,
@@ -2499,7 +2353,7 @@ def __init__(
)
#: Whether the coordinate wraps by ``coord.units.modulus``.
- self.circular = bool(circular)
+ self.circular = circular
def __deepcopy__(self, memo):
"""
@@ -2515,6 +2369,14 @@ def __deepcopy__(self, memo):
new_coord._bounds_dm.data.flags.writeable = False
return new_coord
+ @property
+ def circular(self):
+ return self._metadata_manager.circular
+
+ @circular.setter
+ def circular(self, circular):
+ self._metadata_manager.circular = bool(circular)
+
def copy(self, points=None, bounds=None):
new_coord = super().copy(points=points, bounds=bounds)
# Make the arrays read-only.
@@ -2524,13 +2386,13 @@ def copy(self, points=None, bounds=None):
return new_coord
def __eq__(self, other):
- # TODO investigate equality of AuxCoord and DimCoord if circular is
- # False.
result = NotImplemented
if isinstance(other, DimCoord):
- result = (
- Coord.__eq__(self, other) and self.circular == other.circular
- )
+ # The "circular" member participates in DimCoord to DimCoord
+ # equivalence. We need to do this explicitly here,
+ # as the "circular" member does NOT participate in
+ # DimCoordMetadata to DimCoordMetadata equivalence.
+ result = self.circular == other.circular and super().__eq__(other)
return result
# The __ne__ operator from Coord implements the not __eq__ method.
@@ -2779,19 +2641,20 @@ def __init__(self, method, coords=None, intervals=None, comments=None):
"'method' must be a string - got a '%s'" % type(method)
)
- default_name = CFVariableMixin._DEFAULT_NAME
+ default_name = BaseMetadata.DEFAULT_NAME
_coords = []
+
if coords is None:
pass
elif isinstance(coords, Coord):
_coords.append(coords.name(token=True))
elif isinstance(coords, str):
- _coords.append(CFVariableMixin.token(coords) or default_name)
+ _coords.append(BaseMetadata.token(coords) or default_name)
else:
normalise = (
lambda coord: coord.name(token=True)
if isinstance(coord, Coord)
- else CFVariableMixin.token(coord) or default_name
+ else BaseMetadata.token(coord) or default_name
)
_coords.extend([normalise(coord) for coord in coords])
diff --git a/lib/iris/cube.py b/lib/iris/cube.py
index 1b1a4d7b9a..cc833f8848 100644
--- a/lib/iris/cube.py
+++ b/lib/iris/cube.py
@@ -9,7 +9,7 @@
"""
-from collections import namedtuple, OrderedDict
+from collections import OrderedDict
from collections.abc import (
Iterable,
Container,
@@ -29,56 +29,29 @@
import numpy as np
import numpy.ma as ma
-from iris._cube_coord_common import CFVariableMixin
import iris._concatenate
import iris._constraints
from iris._data_manager import DataManager
import iris._lazy_data as _lazy
-
import iris._merge
import iris.analysis
from iris.analysis.cartography import wrap_lons
import iris.analysis.maths
import iris.aux_factory
+from iris.common import (
+ CFVariableMixin,
+ CoordMetadata,
+ CubeMetadata,
+ DimCoordMetadata,
+ metadata_manager_factory,
+)
import iris.coord_systems
import iris.coords
import iris.exceptions
import iris.util
-__all__ = ["Cube", "CubeList", "CubeMetadata"]
-
-
-class CubeMetadata(
- namedtuple(
- "CubeMetadata",
- [
- "standard_name",
- "long_name",
- "var_name",
- "units",
- "attributes",
- "cell_methods",
- ],
- )
-):
- """
- Represents the phenomenon metadata for a single :class:`Cube`.
-
- """
-
- __slots__ = ()
-
- def name(self, default="unknown"):
- """
- Returns a human-readable name.
-
- First it tries self.standard_name, then it tries the 'long_name'
- attribute, then the 'var_name' attribute, before falling back to
- the value of `default` (which itself defaults to 'unknown').
-
- """
- return self.standard_name or self.long_name or self.var_name or default
+__all__ = ["Cube", "CubeList"]
# The XML namespace to use for CubeML documents
@@ -864,6 +837,9 @@ def __init__(
if isinstance(data, str):
raise TypeError("Invalid data type: {!r}.".format(data))
+ # Configure the metadata manager.
+ self._metadata_manager = metadata_manager_factory(CubeMetadata)
+
# Initialise the cube data manager.
self._data_manager = DataManager(data)
@@ -930,43 +906,15 @@ def __init__(
self.add_ancillary_variable(ancillary_variable, dims)
@property
- def metadata(self):
+ def _names(self):
"""
- An instance of :class:`CubeMetadata` describing the phenomenon.
-
- This property can be updated with any of:
- - another :class:`CubeMetadata` instance,
- - a tuple/dict which can be used to make a :class:`CubeMetadata`,
- - or any object providing the attributes exposed by
- :class:`CubeMetadata`.
+ A tuple containing the value of each name participating in the identity
+ of a :class:`iris.cube.Cube`. This includes the standard name,
+ long name, NetCDF variable name, and the STASH from the attributes
+ dictionary.
"""
- return CubeMetadata(
- self.standard_name,
- self.long_name,
- self.var_name,
- self.units,
- self.attributes,
- self.cell_methods,
- )
-
- @metadata.setter
- def metadata(self, value):
- try:
- value = CubeMetadata(**value)
- except TypeError:
- try:
- value = CubeMetadata(*value)
- except TypeError:
- missing_attrs = [
- field
- for field in CubeMetadata._fields
- if not hasattr(value, field)
- ]
- if missing_attrs:
- raise TypeError("Invalid/incomplete metadata")
- for name in CubeMetadata._fields:
- setattr(self, name, getattr(value, name))
+ return self._metadata_manager._names
def is_compatible(self, other, ignore=None):
"""
@@ -1186,7 +1134,7 @@ def add_cell_measure(self, cell_measure, data_dims=None):
data_dims = self._check_multi_dim_metadata(cell_measure, data_dims)
self._cell_measures_and_dims.append((cell_measure, data_dims))
self._cell_measures_and_dims.sort(
- key=lambda cm_dims: (cm_dims[0]._as_defn(), cm_dims[1])
+ key=lambda cm_dims: (cm_dims[0].metadata, cm_dims[1])
)
def add_ancillary_variable(self, ancillary_variable, data_dims=None):
@@ -1200,6 +1148,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None):
the cube
Kwargs:
+
* data_dims
Integer or iterable of integers giving the data dimensions spanned
by the ancillary variable.
@@ -1207,6 +1156,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None):
Raises a ValueError if an ancillary variable with identical metadata
already exists on the cube.
"""
+
if self.ancillary_variables(ancillary_variable):
raise ValueError("Duplicate ancillary variables not permitted")
@@ -1217,7 +1167,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None):
(ancillary_variable, data_dims)
)
self._ancillary_variables_and_dims.sort(
- key=lambda av_dims: (av_dims[0]._as_defn(), av_dims[1])
+ key=lambda av_dims: (av_dims[0].metadata, av_dims[1])
)
def add_dim_coord(self, dim_coord, data_dim):
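The switch from the private _as_defn() to the public .metadata property relies on metadata comparing independently of the data payload, for example:

    from iris.coords import AuxCoord

    a = AuxCoord([1, 2], long_name="sigma", units="1")
    b = AuxCoord([3, 4], long_name="sigma", units="1")
    print(a.metadata == b.metadata)  # True: same names, units, attributes
    print(a == b)                    # False: the points differ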
@@ -1301,6 +1251,9 @@ def _remove_coord(self, coord):
for coord_, dims in self._aux_coords_and_dims
if coord_ is not coord
]
+ for aux_factory in self.aux_factories:
+ if coord.metadata == aux_factory.metadata:
+ self.remove_aux_factory(aux_factory)
def remove_coord(self, coord):
"""
@@ -1333,7 +1286,7 @@ def remove_cell_measure(self, cell_measure):
(a) a :attr:`standard_name`, :attr:`long_name`, or
:attr:`var_name`. Defaults to value of `default`
(which itself defaults to `unknown`) as defined in
- :class:`iris._cube_coord_common.CFVariableMixin`.
+ :class:`iris.common.CFVariableMixin`.
(b) a cell_measure instance with metadata equal to that of
the desired cell_measures.
@@ -1426,11 +1379,11 @@ def coord_dims(self, coord):
]
# Search derived aux coords
- target_defn = coord._as_defn()
if not matches:
+ target_metadata = coord.metadata
def match(factory):
- return factory._as_defn() == target_defn
+ return factory.metadata == target_metadata
factories = filter(match, self._aux_factories)
matches = [
@@ -1586,13 +1539,14 @@ def coords(
(a) a :attr:`standard_name`, :attr:`long_name`, or
:attr:`var_name`. Defaults to value of `default`
(which itself defaults to `unknown`) as defined in
- :class:`iris._cube_coord_common.CFVariableMixin`.
+ :class:`iris.common.CFVariableMixin`.
(b) a coordinate instance with metadata equal to that of
the desired coordinates. Accepts either a
:class:`iris.coords.DimCoord`, :class:`iris.coords.AuxCoord`,
- :class:`iris.aux_factory.AuxCoordFactory`
- or :class:`iris.coords.CoordDefn`.
+ :class:`iris.aux_factory.AuxCoordFactory`,
+ :class:`iris.common.CoordMetadata` or
+ :class:`iris.common.DimCoordMetadata`.
* standard_name
The CF standard name of the desired coordinate. If None, does not
check for standard name.
@@ -1710,14 +1664,17 @@ def attr_filter(coord_):
]
if coord is not None:
- if isinstance(coord, iris.coords.CoordDefn):
- defn = coord
+ if hasattr(coord, "__class__") and coord.__class__ in (
+ CoordMetadata,
+ DimCoordMetadata,
+ ):
+ target_metadata = coord
else:
- defn = coord._as_defn()
+ target_metadata = coord.metadata
coords_and_factories = [
coord_
for coord_ in coords_and_factories
- if coord_._as_defn() == defn
+ if coord_.metadata == target_metadata
]
if contains_dimension is not None:
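A short sketch of the lookup path above: a CoordMetadata/DimCoordMetadata instance can stand in for a coordinate when filtering (the cube and names here are illustrative only):

    import numpy as np

    from iris.coords import DimCoord
    from iris.cube import Cube

    cube = Cube(np.zeros(3))
    cube.add_dim_coord(
        DimCoord([0.0, 1.0, 2.0], standard_name="latitude", units="degrees"),
        0,
    )
    target = cube.coord("latitude").metadata  # a DimCoordMetadata instance
    [match] = cube.coords(coord=target)
    print(match.name())  # -> latitude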
@@ -1883,7 +1840,7 @@ def cell_measures(self, name_or_cell_measure=None):
(a) a :attr:`standard_name`, :attr:`long_name`, or
:attr:`var_name`. Defaults to value of `default`
(which itself defaults to `unknown`) as defined in
- :class:`iris._cube_coord_common.CFVariableMixin`.
+ :class:`iris.common.CFVariableMixin`.
(b) a cell_measure instance with metadata equal to that of
the desired cell_measures.
@@ -1966,7 +1923,7 @@ def ancillary_variables(self, name_or_ancillary_variable=None):
(a) a :attr:`standard_name`, :attr:`long_name`, or
:attr:`var_name`. Defaults to value of `default`
(which itself defaults to `unknown`) as defined in
- :class:`iris._cube_coord_common.CFVariableMixin`.
+ :class:`iris.common.CFVariableMixin`.
(b) an ancillary_variable instance with metadata equal to that of
the desired ancillary_variables.
@@ -2047,11 +2004,13 @@ def cell_methods(self):
done on the phenomenon.
"""
- return self._cell_methods
+ return self._metadata_manager.cell_methods
@cell_methods.setter
def cell_methods(self, cell_methods):
- self._cell_methods = tuple(cell_methods) if cell_methods else tuple()
+ self._metadata_manager.cell_methods = (
+ tuple(cell_methods) if cell_methods else tuple()
+ )
def core_data(self):
"""
@@ -2876,7 +2835,7 @@ def intersection(self, *args, **kwargs):
For ranges defined over "circular" coordinates (i.e. those
where the `units` attribute has a modulus defined) the cube
- will be "rolled" to fit where neccesary.
+ will be "rolled" to fit where necessary.
.. warning::
@@ -4079,7 +4038,7 @@ def aggregated_by(self, coords, aggregator, **kwargs):
)
coords = self._as_list_of_coords(coords)
- for coord in sorted(coords, key=lambda coord: coord._as_defn()):
+ for coord in sorted(coords, key=lambda coord: coord.metadata):
if coord.ndim > 1:
msg = (
"Cannot aggregate_by coord %s as it is "
@@ -4195,7 +4154,7 @@ def aggregated_by(self, coords, aggregator, **kwargs):
for coord in groupby.coords:
if (
dim_coord is not None
- and dim_coord._as_defn() == coord._as_defn()
+ and dim_coord.metadata == coord.metadata
and isinstance(coord, iris.coords.DimCoord)
):
aggregateby_cube.add_dim_coord(
diff --git a/lib/iris/etc/logging.yaml b/lib/iris/etc/logging.yaml
new file mode 100644
index 0000000000..a73906e7db
--- /dev/null
+++ b/lib/iris/etc/logging.yaml
@@ -0,0 +1,45 @@
+version: 1
+
+formatters:
+ basic:
+ format: "%(asctime)s %(name)s %(levelname)s - %(message)s"
+ datefmt: "%d-%m-%Y %H:%M:%S"
+ basic-cls-func:
+ format: "%(asctime)s %(name)s %(levelname)s - %(message)s [%(cls)s.%(funcName)s]"
+ datefmt: "%d-%m-%Y %H:%M:%S"
+ basic-func:
+ format: "%(asctime)s %(name)s %(levelname)s - %(message)s [%(funcName)s]"
+
+handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: basic
+ stream: ext://sys.stdout
+ console-cls-func:
+ class: logging.StreamHandler
+ formatter: basic-cls-func
+ stream: ext://sys.stdout
+ console-func:
+ class: logging.StreamHandler
+ formatter: basic-func
+ stream: ext://sys.stdout
+
+loggers:
+ iris.common.metadata:
+ level: INFO
+ handlers: [console-cls-func]
+ propagate: no
+ iris.common.resolve:
+ level: INFO
+ handlers: [console-func]
+ propagate: no
+ matplotlib:
+ level: INFO
+ PIL:
+ level: INFO
+ urllib3:
+ level: INFO
+
+root:
+ level: INFO
+ handlers: [console]
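A minimal sketch of consuming this configuration; the file path and the PyYAML dependency are assumptions of the example, not part of the patch:

    import logging
    import logging.config

    import yaml

    with open("lib/iris/etc/logging.yaml") as fh:
        logging.config.dictConfig(yaml.safe_load(fh))

    # The iris.common.metadata handler formats a %(cls)s field, so callers
    # on that logger must supply it via 'extra'; the root logger needs no
    # extra context.
    logging.getLogger().info("logging configured")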
diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py
index 2992360247..e357f2ca9d 100644
--- a/lib/iris/experimental/stratify.py
+++ b/lib/iris/experimental/stratify.py
@@ -68,8 +68,8 @@ def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None):
that are generally monotonic in the direction of interpolation, such as
height/pressure or salinity/depth.
- Parameters
- ----------
+ Args:
+
cube : :class:`~iris.cube.Cube`
The phenomenon data to be re-levelled.
diff --git a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb
index 5ecfeb77b1..2afc823795 100644
--- a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb
+++ b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb
@@ -1173,6 +1173,7 @@ fc_extras
import numpy.ma as ma
import iris.aux_factory
+ from iris.common.mixin import _get_valid_standard_name
import iris.coords
import iris.coord_systems
import iris.fileformats.cf as cf
@@ -1182,7 +1183,6 @@ fc_extras
import iris.exceptions
import iris.std_names
import iris.util
- from iris._cube_coord_common import get_valid_standard_name
from iris._lazy_data import as_lazy_data
@@ -1195,6 +1195,8 @@ fc_extras
UD_UNITS_LON = ['degrees_east', 'degree_east', 'degree_e', 'degrees_e',
'degreee', 'degreese', 'degrees', 'degrees east',
'degree east', 'degree e', 'degrees e']
+ UNKNOWN_UNIT_STRING = "?"
+ NO_UNIT_STRING = "-"
#
# CF Dimensionless Vertical Coordinates
@@ -1298,7 +1300,7 @@ fc_extras
if standard_name is not None:
try:
- cube.standard_name = get_valid_standard_name(standard_name)
+ cube.standard_name = _get_valid_standard_name(standard_name)
except ValueError:
if cube.long_name is not None:
cube.attributes['invalid_standard_name'] = standard_name
@@ -1651,9 +1653,9 @@ fc_extras
################################################################################
def get_attr_units(cf_var, attributes):
- attr_units = getattr(cf_var, CF_ATTR_UNITS, cf_units._UNIT_DIMENSIONLESS)
+ attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING)
if not attr_units:
- attr_units = '1'
+ attr_units = UNKNOWN_UNIT_STRING
# Sanitise lat/lon units.
if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON:
@@ -1668,10 +1670,10 @@ fc_extras
cf_var.cf_name, attr_units)
warnings.warn(msg)
attributes['invalid_units'] = attr_units
- attr_units = cf_units._UNKNOWN_UNIT_STRING
+ attr_units = UNKNOWN_UNIT_STRING
if np.issubdtype(cf_var.dtype, np.str_):
- attr_units = cf_units._NO_UNIT_STRING
+ attr_units = NO_UNIT_STRING
# Get any associated calendar for a time reference coordinate.
if cf_units.as_unit(attr_units).is_time_reference():
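An illustrative check of the two fallback strings defined above, assuming standard cf_units behaviour:

    import cf_units

    print(cf_units.as_unit("?").is_unknown())  # True: unknown unit
    print(cf_units.as_unit("-").is_no_unit())  # True: string-typed data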
@@ -1693,7 +1695,7 @@ fc_extras
if standard_name is not None:
try:
- standard_name = get_valid_standard_name(standard_name)
+ standard_name = _get_valid_standard_name(standard_name)
except ValueError:
if long_name is not None:
attributes['invalid_standard_name'] = standard_name
diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py
index 1db4e6c61e..75f328a80e 100644
--- a/lib/iris/fileformats/cf.py
+++ b/lib/iris/fileformats/cf.py
@@ -10,7 +10,7 @@
References:
[CF] NetCDF Climate and Forecast (CF) Metadata conventions, Version 1.5, October, 2010.
-[NUG] NetCDF User's Guide, http://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html
+[NUG] NetCDF User's Guide, https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/
"""
diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py
index 0d9d149664..0464eb37ed 100644
--- a/lib/iris/fileformats/name_loaders.py
+++ b/lib/iris/fileformats/name_loaders.py
@@ -882,7 +882,7 @@ def load_NAMEIII_timeseries(filename):
for i, data_list in enumerate(data_lists):
data_list.append(float(vals[i + 1]))
- data_arrays = [np.array(l) for l in data_lists]
+ data_arrays = [np.array(dl) for dl in data_lists]
time_array = np.array(time_list)
tdim = NAMECoord(name="time", dimension=0, values=time_array)
@@ -955,7 +955,7 @@ def load_NAMEII_timeseries(filename):
for i, data_list in enumerate(data_lists):
data_list.append(float(vals[i + 2]))
- data_arrays = [np.array(l) for l in data_lists]
+ data_arrays = [np.array(dl) for dl in data_lists]
time_array = np.array(time_list)
tdim = NAMECoord(name="time", dimension=0, values=time_array)
@@ -1111,7 +1111,7 @@ def load_NAMEIII_version2(filename):
for i, data_list in enumerate(data_lists):
data_list.append(float(vals[i + datacol1]))
- data_arrays = [np.array(l) for l in data_lists]
+ data_arrays = [np.array(dl) for dl in data_lists]
# Convert Z and T arrays into arrays of indices
zind = []
diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py
index 4d7ddedc61..f34dc45e72 100644
--- a/lib/iris/fileformats/netcdf.py
+++ b/lib/iris/fileformats/netcdf.py
@@ -22,6 +22,7 @@
import warnings
import dask.array as da
+import cf_units
import netCDF4
import numpy as np
import numpy.ma as ma
@@ -959,7 +960,7 @@ def write(
than global attributes.
* unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ :class:`iris.coords.Coord` objects):
List of coordinate names (or coordinate objects)
corresponding to coordinate dimensions of `cube` to save with the
NetCDF dimension variable length 'UNLIMITED'. By default, no
@@ -992,10 +993,10 @@ def write(
Used to manually specify the HDF5 chunksizes for each dimension of
the variable. A detailed discussion of HDF chunking and I/O
performance is available here:
- http://www.hdfgroup.org/HDF5/doc/H5.user/Chunking.html. Basically,
- you want the chunk size for each dimension to match as closely as
- possible the size of the data block that users will read from the
- file. `chunksizes` cannot be set if `contiguous=True`.
+ https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html.
+ Basically, you want the chunk size for each dimension to match
+ as closely as possible the size of the data block that users will
+ read from the file. `chunksizes` cannot be set if `contiguous=True`.
* endian (string):
Used to control whether the data is stored in little or big endian
@@ -1760,7 +1761,7 @@ def _inner_create_cf_cellmeasure_or_ancil_variable(
# Add the data to the CF-netCDF variable.
cf_var[:] = data
- if dimensional_metadata.units != "unknown":
+ if dimensional_metadata.units.is_udunits():
_setncattr(cf_var, "units", str(dimensional_metadata.units))
if dimensional_metadata.standard_name is not None:
@@ -1926,7 +1927,7 @@ def _create_cf_coord_variable(self, cube, dimension_names, coord):
# Deal with CF-netCDF units and standard name.
standard_name, long_name, units = self._cf_coord_identity(coord)
- if units != "unknown":
+ if cf_units.as_unit(units).is_udunits():
_setncattr(cf_var, "units", units)
if standard_name is not None:
@@ -2371,7 +2372,7 @@ def store(data, cf_var, fill_value):
if cube.long_name:
_setncattr(cf_var, "long_name", cube.long_name)
- if cube.units != "unknown":
+ if cube.units.is_udunits():
_setncattr(cf_var, "units", str(cube.units))
# Add the CF-netCDF calendar attribute.
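The is_udunits() gate above skips the units attribute for the unknown and no-unit cases instead of string-comparing against "unknown"; a quick sketch:

    import cf_units

    print(cf_units.Unit("K").is_udunits())        # True: attribute written
    print(cf_units.Unit("unknown").is_udunits())  # False: attribute skipped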
@@ -2506,7 +2507,7 @@ def save(
than global attributes.
* unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ :class:`iris.coords.Coord` objects):
List of coordinate names (or coordinate objects) corresponding
to coordinate dimensions of `cube` to save with the NetCDF dimension
variable length 'UNLIMITED'. By default, no unlimited dimensions are
@@ -2538,7 +2539,7 @@ def save(
* chunksizes (tuple of int):
Used to manually specify the HDF5 chunksizes for each dimension of the
variable. A detailed discussion of HDF chunking and I/O performance is
- available here: http://www.hdfgroup.org/HDF5/doc/H5.user/Chunking.html.
+ available here: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html.
Basically, you want the chunk size for each dimension to match as
closely as possible the size of the data block that users will read
from the file. `chunksizes` cannot be set if `contiguous=True`.
diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py
index deb4ac862c..4cf8755bb9 100644
--- a/lib/iris/fileformats/nimrod_load_rules.py
+++ b/lib/iris/fileformats/nimrod_load_rules.py
@@ -233,9 +233,8 @@ def reference_time(cube, field):
field.dt_hour,
field.dt_minute,
)
-
ref_time_coord = DimCoord(
- np.array(TIME_UNIT.date2num(data_date), dtype=np.int64),
+ np.array(np.round(TIME_UNIT.date2num(data_date)), dtype=np.int64),
standard_name="forecast_reference_time",
units=TIME_UNIT,
)
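Why round before the integer cast: casting truncates toward zero, so a date2num result fractionally below a whole hour would lose that hour. Illustrative only:

    import numpy as np

    hours = 359999.9999999999
    print(np.array(hours, dtype=np.int64))            # 359999 (truncated)
    print(np.array(np.round(hours), dtype=np.int64))  # 360000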
@@ -301,7 +300,9 @@ def experiment(cube, field):
"""Add an 'experiment number' to the cube, if present in the field."""
if not is_missing(field, field.experiment_num):
cube.add_aux_coord(
- DimCoord(field.experiment_num, long_name="experiment_number")
+ DimCoord(
+ field.experiment_num, long_name="experiment_number", units="1"
+ )
)
@@ -412,8 +413,8 @@ def coord_system(field, handle_metadata_errors):
)
if any([is_missing(field, v) for v in crs_args]):
warnings.warn(
- f"Coordinate Reference System is not completely defined. "
- f"Plotting and reprojection may be impaired."
+ "Coordinate Reference System is not completely defined. "
+ "Plotting and reprojection may be impaired."
)
coord_sys = iris.coord_systems.TransverseMercator(
*crs_args, iris.coord_systems.GeogCS(**ellipsoid),
@@ -592,7 +593,9 @@ def ensemble_member(cube, field):
if not is_missing(field, ensemble_member_value):
cube.add_aux_coord(
DimCoord(
- np.array(ensemble_member_value, dtype=np.int32), "realization"
+ np.array(ensemble_member_value, dtype=np.int32),
+ "realization",
+ units="1",
)
)
diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py
index c0a4081970..53d9f4dc35 100644
--- a/lib/iris/fileformats/pp_load_rules.py
+++ b/lib/iris/fileformats/pp_load_rules.py
@@ -147,6 +147,7 @@ def _convert_vertical_coords(
model_level_number,
standard_name="model_level_number",
attributes={"positive": "down"},
+ units="1",
)
coords_and_dims.append((coord, dim))
@@ -197,6 +198,7 @@ def _convert_vertical_coords(
model_level_number,
long_name="soil_model_level_number",
attributes={"positive": "down"},
+ units="1",
)
coords_and_dims.append((coord, dim))
elif np.any(brsvd1 != brlev):
@@ -235,6 +237,7 @@ def _convert_vertical_coords(
model_level_number,
standard_name="model_level_number",
attributes={"positive": "up"},
+ units="1",
)
level_pressure = _dim_or_aux(
bhlev,
@@ -243,7 +246,10 @@ def _convert_vertical_coords(
bounds=np.vstack((bhrlev, brsvd2)).T,
)
sigma = AuxCoord(
- blev, long_name="sigma", bounds=np.vstack((brlev, brsvd1)).T
+ blev,
+ long_name="sigma",
+ bounds=np.vstack((brlev, brsvd1)).T,
+ units="1",
)
coords_and_dims.extend(
[(model_level_number, dim), (level_pressure, dim), (sigma, dim)]
@@ -265,6 +271,7 @@ def _convert_vertical_coords(
model_level_number,
standard_name="model_level_number",
attributes={"positive": "up"},
+ units="1",
)
level_height = _dim_or_aux(
blev,
@@ -274,7 +281,10 @@ def _convert_vertical_coords(
attributes={"positive": "up"},
)
sigma = AuxCoord(
- bhlev, long_name="sigma", bounds=np.vstack((bhrlev, brsvd2)).T
+ bhlev,
+ long_name="sigma",
+ bounds=np.vstack((bhrlev, brsvd2)).T,
+ units="1",
)
coords_and_dims.extend(
[(model_level_number, dim), (level_height, dim), (sigma, dim)]
@@ -627,7 +637,7 @@ def _convert_time_coords(
def date2hours(t):
epoch_hours = _epoch_date_hours(epoch_hours_unit, t)
if t.minute == 0 and t.second == 0:
- epoch_hours = round(epoch_hours)
+ epoch_hours = np.around(epoch_hours)
return epoch_hours
def date2year(t_in):
@@ -846,7 +856,7 @@ def _convert_scalar_realization_coords(lbrsvd4):
coords_and_dims = []
if lbrsvd4 != 0:
coords_and_dims.append(
- (DimCoord(lbrsvd4, standard_name="realization"), None)
+ (DimCoord(lbrsvd4, standard_name="realization", units="1"), None)
)
return coords_and_dims
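The explicit units="1" added throughout marks these coordinates as genuinely dimensionless rather than leaving them with unknown units, for example:

    from iris.coords import DimCoord

    print(DimCoord(3, standard_name="realization").units)             # unknown
    print(DimCoord(3, standard_name="realization", units="1").units)  # 1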
@@ -1078,7 +1088,7 @@ def _all_other_rules(f):
and f.lbmon == f.lbmond
):
aux_coords_and_dims.append(
- (AuxCoord(f.lbmon, long_name="month_number"), None)
+ (AuxCoord(f.lbmon, long_name="month_number", units="1"), None)
)
aux_coords_and_dims.append(
(
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py
index 1e6cac691e..07ed5eb8ce 100644
--- a/lib/iris/fileformats/rules.py
+++ b/lib/iris/fileformats/rules.py
@@ -28,7 +28,7 @@ class ConcreteReferenceTarget:
"""Everything you need to make a real Cube for a named reference."""
def __init__(self, name, transform=None):
- #: The name used to connect references with referencees.
+ #: The name used to connect references with referenced objects.
self.name = name
#: An optional transformation to apply to the cubes.
self.transform = transform
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py
index 36f79d32d3..31cd862d85 100644
--- a/lib/iris/io/__init__.py
+++ b/lib/iris/io/__init__.py
@@ -334,7 +334,7 @@ def find_saver(filespec):
def save(source, target, saver=None, **kwargs):
"""
- Save one or more Cubes to file (or other writable).
+ Save one or more Cubes to file (or other writeable).
Iris currently supports three file formats for saving, which it can
recognise by filename extension:
@@ -353,7 +353,7 @@ def save(source, target, saver=None, **kwargs):
* source - A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or
sequence of cubes.
- * target - A filename (or writable, depending on file format).
+ * target - A filename (or writeable, depending on file format).
When given a filename or file, Iris can determine the
file format.
diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py
index 6cca135d21..ea2d939280 100644
--- a/lib/iris/iterate.py
+++ b/lib/iris/iterate.py
@@ -302,12 +302,13 @@ def __init__(self, coord):
self._coord = coord
# Methods of contained class we need to expose/use.
- def _as_defn(self):
- return self._coord._as_defn()
+ @property
+ def metadata(self):
+ return self._coord.metadata
- # Methods of contained class we want to overide/customise.
+ # Methods of contained class we want to override/customise.
def __eq__(self, other):
- return self._coord._as_defn() == other._as_defn()
+ return self._coord.metadata == other.metadata
# Force use of __eq__ for set operations.
def __hash__(self):
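A compact sketch of the wrapper pattern above: a constant __hash__ (as in the original class) makes every wrapper collide, forcing set membership through the metadata-based __eq__. The _CoordWrapper name here is illustrative:

    from iris.coords import AuxCoord

    class _CoordWrapper:
        def __init__(self, coord):
            self._coord = coord

        @property
        def metadata(self):
            return self._coord.metadata

        def __eq__(self, other):
            return self._coord.metadata == other.metadata

        def __hash__(self):
            return 1  # collide on purpose: sets fall back to __eq__

    a = AuxCoord([1], long_name="level", units="1")
    b = AuxCoord([2], long_name="level", units="1")
    print(len({_CoordWrapper(a), _CoordWrapper(b)}))  # 1: same metadata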
diff --git a/lib/iris/plot.py b/lib/iris/plot.py
index 9dff582bc4..36afe906dc 100644
--- a/lib/iris/plot.py
+++ b/lib/iris/plot.py
@@ -168,7 +168,7 @@ def guess_axis(coord):
if isinstance(coord, iris.coords.DimCoord)
]
if aux_coords:
- aux_coords.sort(key=lambda coord: coord._as_defn())
+ aux_coords.sort(key=lambda coord: coord.metadata)
coords[dim] = aux_coords[0]
# If plotting a 2 dimensional plot, check for 2d coordinates
@@ -183,7 +183,7 @@ def guess_axis(coord):
coord for coord in two_dim_coords if coord.ndim == 2
]
if len(two_dim_coords) >= 2:
- two_dim_coords.sort(key=lambda coord: coord._as_defn())
+ two_dim_coords.sort(key=lambda coord: coord.metadata)
coords = two_dim_coords[:2]
if mode == iris.coords.POINT_MODE:
diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py
index 66966daaf4..b5b80a97ef 100644
--- a/lib/iris/tests/__init__.py
+++ b/lib/iris/tests/__init__.py
@@ -21,6 +21,7 @@
import codecs
import collections
+from collections.abc import Mapping
import contextlib
import datetime
import difflib
@@ -76,13 +77,6 @@
else:
GDAL_AVAILABLE = True
-try:
- from iris_grib.message import GribMessage
-
- GRIB_AVAILABLE = True
-except ImportError:
- GRIB_AVAILABLE = False
-
try:
import iris_sample_data # noqa
except ImportError:
@@ -799,7 +793,7 @@ def _unique_id(self):
bits[0] = os.path.splitext(file_name)[0]
folder, location = os.path.split(path)
bits = [location] + bits
- while location not in ["iris", "example_tests"]:
+ while location not in ["iris", "gallery_tests"]:
folder, location = os.path.split(folder)
bits = [location] + bits
test_id = ".".join(bits)
@@ -1011,6 +1005,78 @@ def assertArrayShapeStats(self, result, shape, mean, std_dev, rtol=1e-6):
self.assertArrayAllClose(result.data.mean(), mean, rtol=rtol)
self.assertArrayAllClose(result.data.std(), std_dev, rtol=rtol)
+ def assertDictEqual(self, lhs, rhs, msg=None):
+ """
+ This method overrides unittest.TestCase.assertDictEqual (new in Python 3.1)
+ in order to cope with dictionary comparison where the value of a key may
+ be a numpy array.
+
+ """
+ if not isinstance(lhs, Mapping):
+ emsg = (
+ f"Provided LHS argument is not a 'Mapping', got {type(lhs)}."
+ )
+ self.fail(emsg)
+
+ if not isinstance(rhs, Mapping):
+ emsg = (
+ f"Provided RHS argument is not a 'Mapping', got {type(rhs)}."
+ )
+ self.fail(emsg)
+
+ if set(lhs.keys()) != set(rhs.keys()):
+ emsg = f"{lhs!r} != {rhs!r}."
+ self.fail(emsg)
+
+ for key in lhs.keys():
+ lvalue, rvalue = lhs[key], rhs[key]
+
+ if ma.isMaskedArray(lvalue) or ma.isMaskedArray(rvalue):
+ if not ma.isMaskedArray(lvalue):
+ emsg = (
+ f"Dictionary key {key!r} values are not equal, "
+ f"the LHS value has type {type(lvalue)} and "
+ f"the RHS value has type {ma.core.MaskedArray}."
+ )
+ raise AssertionError(emsg)
+
+ if not ma.isMaskedArray(rvalue):
+ emsg = (
+ f"Dictionary key {key!r} values are not equal, "
+ f"the LHS value has type {ma.core.MaskedArray} and "
+ f"the RHS value has type {type(lvalue)}."
+ )
+ raise AssertionError(emsg)
+
+ self.assertMaskedArrayEqual(lvalue, rvalue)
+ elif isinstance(lvalue, np.ndarray) or isinstance(
+ rvalue, np.ndarray
+ ):
+ if not isinstance(lvalue, np.ndarray):
+ emsg = (
+ f"Dictionary key {key!r} values are not equal, "
+ f"the LHS value has type {type(lvalue)} and "
+ f"the RHS value has type {np.ndarray}."
+ )
+ raise AssertionError(emsg)
+
+ if not isinstance(rvalue, np.ndarray):
+ emsg = (
+ f"Dictionary key {key!r} values are not equal, "
+ f"the LHS value has type {np.ndarray} and "
+ f"the RHS value has type {type(rvalue)}."
+ )
+ raise AssertionError(emsg)
+
+ self.assertArrayEqual(lvalue, rvalue)
+ else:
+ if lvalue != rvalue:
+ emsg = (
+ f"Dictionary key {key!r} values are not equal, "
+ f"{lvalue!r} != {rvalue!r}."
+ )
+ raise AssertionError(emsg)
+
# An environment variable controls whether test timings are output.
#
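Illustrative use of the override in an IrisTest subclass: array-valued entries are routed through assertArrayEqual rather than bare ==, which would be ambiguous for ndarrays.

    import numpy as np

    import iris.tests as tests

    class Demo(tests.IrisTest):
        def test_dicts_with_arrays(self):
            lhs = {"grid": np.arange(3), "name": "ts"}
            rhs = {"grid": np.arange(3.0).astype(int), "name": "ts"}
            # Plain dict equality would raise ValueError here; this passes.
            self.assertDictEqual(lhs, rhs)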
@@ -1181,12 +1247,6 @@ class MyPlotTests(test.GraphicsTest):
return skip(fn)
-skip_grib = unittest.skipIf(
- not GRIB_AVAILABLE,
- 'Test(s) require "iris-grib" package, ' "which is not available.",
-)
-
-
skip_sample_data = unittest.skipIf(
not SAMPLE_DATA_AVAILABLE,
('Test(s) require "iris-sample-data", ' "which is not available."),
diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py
index 0a4d186b39..ba50e389a8 100644
--- a/lib/iris/tests/integration/fast_load/test_fast_load.py
+++ b/lib/iris/tests/integration/fast_load/test_fast_load.py
@@ -9,7 +9,7 @@
# before importing anything else.
import iris.tests as tests
-from collections import Iterable
+from collections.abc import Iterable
import tempfile
import shutil
@@ -377,7 +377,8 @@ def callback(cube, collation, filename):
# Make an 'expected' from selected fields, with the expected attribute.
expected = CubeList([flds[1], flds[3]]).merge()
if not self.do_fast_loads:
- expected[0].attributes["LBVC"] = 8
+ # This is actually a NumPy int32, so honour that here.
+ expected[0].attributes["LBVC"] = np.int32(8)
else:
expected[0].attributes["A_LBVC"] = [8, 8]
diff --git a/lib/iris/tests/integration/format_interop/test_name_grib.py b/lib/iris/tests/integration/format_interop/test_name_grib.py
deleted file mode 100644
index 63889b879d..0000000000
--- a/lib/iris/tests/integration/format_interop/test_name_grib.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the LGPL license.
-# See COPYING and COPYING.LESSER in the root of the repository for full
-# licensing details.
-"""Integration tests for NAME to GRIB2 interoperability."""
-
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests
-
-import numpy as np
-import warnings
-
-import iris
-
-
-def name_cb(cube, field, filename):
- # NAME files give the time point at the end of the range but Iris'
- # GRIB loader creates it in the middle (the GRIB file itself doesn't
- # encode a time point). Here we make them consistent so we can
- # easily compare them.
- t_coord = cube.coord("time")
- t_coord.points = t_coord.bounds[0][1]
- fp_coord = cube.coord("forecast_period")
- fp_coord.points = fp_coord.bounds[0][1]
- # NAME contains extra vertical meta-data.
- z_coord = cube.coords("height")
- if z_coord:
- z_coord[0].standard_name = "height"
- z_coord[0].long_name = "height above ground level"
-
-
-@tests.skip_grib
-class TestNameToGRIB(tests.IrisTest):
- def check_common(self, name_cube, grib_cube):
- self.assertTrue(np.allclose(name_cube.data, name_cube.data))
- self.assertTrue(
- np.allclose(
- name_cube.coord("latitude").points,
- grib_cube.coord("latitude").points,
- )
- )
- self.assertTrue(
- np.allclose(
- name_cube.coord("longitude").points,
- grib_cube.coord("longitude").points - 360,
- )
- )
-
- for c in ["height", "time"]:
- if name_cube.coords(c):
- self.assertEqual(name_cube.coord(c), grib_cube.coord(c))
-
- @tests.skip_data
- def test_name2_field(self):
- filepath = tests.get_data_path(("NAME", "NAMEII_field.txt"))
- name_cubes = iris.load(filepath)
-
- # There is a known load/save problem with numerous
- # gribapi/eccodes versions and
- # zero only data, where min == max.
- # This may be a problem with data scaling.
- for i, name_cube in enumerate(name_cubes):
- data = name_cube.data
- if np.min(data) == np.max(data):
- msg = (
- 'NAMEII cube #{}, "{}" has empty data : '
- "SKIPPING test for this cube, as save/load will "
- "not currently work."
- )
- warnings.warn(msg.format(i, name_cube.name()))
- continue
-
- with self.temp_filename(".grib2") as temp_filename:
- iris.save(name_cube, temp_filename)
- grib_cube = iris.load_cube(temp_filename, callback=name_cb)
- self.check_common(name_cube, grib_cube)
- self.assertCML(
- grib_cube,
- tests.get_result_path(
- (
- "integration",
- "name_grib",
- "NAMEII",
- "{}_{}.cml".format(i, name_cube.name()),
- )
- ),
- )
-
- @tests.skip_data
- def test_name3_field(self):
- filepath = tests.get_data_path(("NAME", "NAMEIII_field.txt"))
- name_cubes = iris.load(filepath)
- for i, name_cube in enumerate(name_cubes):
- with self.temp_filename(".grib2") as temp_filename:
- iris.save(name_cube, temp_filename)
- grib_cube = iris.load_cube(temp_filename, callback=name_cb)
-
- self.check_common(name_cube, grib_cube)
- self.assertCML(
- grib_cube,
- tests.get_result_path(
- (
- "integration",
- "name_grib",
- "NAMEIII",
- "{}_{}.cml".format(i, name_cube.name()),
- )
- ),
- )
-
-
-if __name__ == "__main__":
- tests.main()
diff --git a/lib/iris/tests/integration/format_interop/test_pp_grib.py b/lib/iris/tests/integration/format_interop/test_pp_grib.py
deleted file mode 100644
index 70d89f834a..0000000000
--- a/lib/iris/tests/integration/format_interop/test_pp_grib.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the LGPL license.
-# See COPYING and COPYING.LESSER in the root of the repository for full
-# licensing details.
-"""Integration tests for PP/GRIB interoperability."""
-
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests
-
-import iris
-
-
-@tests.skip_grib
-class TestBoundedTime(tests.IrisTest):
- @tests.skip_data
- def test_time_and_forecast_period_round_trip(self):
- pp_path = tests.get_data_path(
- ("PP", "meanMaxMin", "200806081200__qwpb.T24.pp")
- )
- # Choose the first time-bounded Cube in the PP dataset.
- original = [
- cube
- for cube in iris.load(pp_path)
- if cube.coord("time").has_bounds()
- ][0]
- # Save it to GRIB2 and re-load.
- with self.temp_filename(".grib2") as grib_path:
- iris.save(original, grib_path)
- from_grib = iris.load_cube(grib_path)
- # Avoid the downcasting warning when saving to PP.
- from_grib.data = from_grib.data.astype("f4")
- # Re-save to PP and re-load.
- with self.temp_filename(".pp") as pp_path:
- iris.save(from_grib, pp_path)
- from_pp = iris.load_cube(pp_path)
- self.assertEqual(original.coord("time"), from_grib.coord("time"))
- self.assertEqual(
- original.coord("forecast_period"),
- from_grib.coord("forecast_period"),
- )
- self.assertEqual(original.coord("time"), from_pp.coord("time"))
- self.assertEqual(
- original.coord("forecast_period"), from_pp.coord("forecast_period")
- )
-
-
-if __name__ == "__main__":
- tests.main()
diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py
index 8c6e0f6659..267e5beb50 100644
--- a/lib/iris/tests/integration/test_netcdf.py
+++ b/lib/iris/tests/integration/test_netcdf.py
@@ -81,7 +81,9 @@ def test_hybrid_height_and_pressure(self):
1200.0, long_name="level_pressure", units="hPa"
)
)
- cube.add_aux_coord(iris.coords.DimCoord(0.5, long_name="other sigma"))
+ cube.add_aux_coord(
+ iris.coords.DimCoord(0.5, long_name="other sigma", units="1")
+ )
cube.add_aux_coord(
iris.coords.DimCoord(
1000.0, long_name="surface_air_pressure", units="hPa"
diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py
index 6fbf180ac5..b9b096d782 100644
--- a/lib/iris/tests/integration/test_pp.py
+++ b/lib/iris/tests/integration/test_pp.py
@@ -299,7 +299,7 @@ def test_hybrid_height_with_non_standard_coords(self):
delta_lower, delta, delta_upper = 150, 200, 250
cube = Cube(np.zeros((ny, nx)), "air_temperature")
- level_coord = AuxCoord(0, "model_level_number")
+ level_coord = AuxCoord(0, "model_level_number", units="1")
cube.add_aux_coord(level_coord)
delta_coord = AuxCoord(
delta,
@@ -308,7 +308,10 @@ def test_hybrid_height_with_non_standard_coords(self):
units="m",
)
sigma_coord = AuxCoord(
- sigma, bounds=[[sigma_lower, sigma_upper]], long_name="mavis"
+ sigma,
+ bounds=[[sigma_lower, sigma_upper]],
+ long_name="mavis",
+ units="1",
)
surface_altitude_coord = AuxCoord(
np.zeros((ny, nx)), "surface_altitude", units="m"
@@ -343,7 +346,7 @@ def test_hybrid_pressure_with_non_standard_coords(self):
delta_lower, delta, delta_upper = 0.15, 0.2, 0.25
cube = Cube(np.zeros((ny, nx)), "air_temperature")
- level_coord = AuxCoord(0, "model_level_number")
+ level_coord = AuxCoord(0, "model_level_number", units="1")
cube.add_aux_coord(level_coord)
delta_coord = AuxCoord(
delta,
@@ -352,7 +355,10 @@ def test_hybrid_pressure_with_non_standard_coords(self):
units="Pa",
)
sigma_coord = AuxCoord(
- sigma, bounds=[[sigma_lower, sigma_upper]], long_name="mavis"
+ sigma,
+ bounds=[[sigma_lower, sigma_upper]],
+ long_name="mavis",
+ units="1",
)
surface_air_pressure_coord = AuxCoord(
np.zeros((ny, nx)), "surface_air_pressure", units="Pa"
[Expected-result CML updates whose XML hunk content is not recoverable here.
The affected files under lib/iris/tests/results/analysis/ are: abs.cml,
addition.cml, addition_coord_x.cml, addition_coord_y.cml,
addition_different_std_name.cml, addition_in_place.cml,
addition_in_place_coord.cml, addition_scalar.cml, apply_ifunc.cml,
apply_ifunc_frompyfunc.cml, apply_ufunc.cml, apply_ufunc_frompyfunc.cml,
division.cml, division_by_array.cml, division_by_latitude.cml,
division_by_longitude.cml, division_by_singular_coord.cml,
division_scalar.cml, exponentiate.cml, first_quartile_foo_1d.cml,
first_quartile_foo_1d_fast_percentile.cml, first_quartile_foo_2d.cml,
first_quartile_foo_2d_fast_percentile.cml, first_quartile_foo_bar_2d.cml,
first_quartile_foo_bar_2d_fast_percentile.cml,
last_quartile_foo_3d_masked.cml, last_quartile_foo_3d_notmasked.cml,
last_quartile_foo_3d_notmasked_fast_percentile.cml, log.cml, log10.cml,
log2.cml, multiply.cml, multiply_different_std_name.cml, sqrt.cml and
subtract.cml. Each arithmetic/ufunc result file gains three lines at the top
of its header (@@ -1,6 +1,9 @@) and each quartile/percentile result file has
a single line replaced.]