diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index 86476a357f..0c223f3414 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -222,6 +222,10 @@ This document explains the changes made to Iris for this release
bin in the system PATH.
(:pull:`4794`)
+#. `@pp-mo`_ split the module :mod:`iris.fileformats.netcdf` into separate
+   :mod:`~iris.fileformats.netcdf.loader` and :mod:`~iris.fileformats.netcdf.saver`
+   submodules, purely to make the code easier to work with; from the user's
+   point of view, it is still all one module.
+
.. comment
Whatsnew author names (@github name) in alphabetical order. Note that,
diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py
index 6c802e00d4..a522d91313 100644
--- a/lib/iris/experimental/ugrid/load.py
+++ b/lib/iris/experimental/ugrid/load.py
@@ -8,8 +8,7 @@
Extensions to Iris' NetCDF loading to allow the construction of
:class:`~iris.experimental.ugrid.mesh.Mesh`\\ es from UGRID data in the file.
-Eventual destination: :mod:`iris.fileformats.netcdf` (plan to split that module
-into ``load`` and ``save`` in future).
+Eventual destination: :mod:`iris.fileformats.netcdf`.
"""
from contextlib import contextmanager
@@ -19,8 +18,8 @@
from ...config import get_logger
from ...coords import AuxCoord
-from ...fileformats import netcdf
from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names
+from ...fileformats.netcdf import loader as nc_loader
from ...io import decode_uri, expand_filespecs
from ...util import guess_coord_axis
from .cf import (
@@ -202,7 +201,7 @@ def load_meshes(uris, var_name=None):
else:
handling_format_spec = FORMAT_AGENT.get_spec(source, None)
- if handling_format_spec.handler == netcdf.load_cubes:
+ if handling_format_spec.handler == nc_loader.load_cubes:
valid_sources.append(source)
else:
message = f"Ignoring non-NetCDF file: {source}"
@@ -239,7 +238,7 @@ def _build_aux_coord(coord_var, file_path):
assert isinstance(coord_var, CFUGridAuxiliaryCoordinateVariable)
attributes = {}
attr_units = get_attr_units(coord_var, attributes)
- points_data = netcdf._get_cf_var_data(coord_var, file_path)
+ points_data = nc_loader._get_cf_var_data(coord_var, file_path)
# Bounds will not be loaded:
# Bounds may be present, but the UGRID conventions state this would
@@ -293,7 +292,7 @@ def _build_connectivity(connectivity_var, file_path, element_dims):
assert isinstance(connectivity_var, CFUGridConnectivityVariable)
attributes = {}
attr_units = get_attr_units(connectivity_var, attributes)
- indices_data = netcdf._get_cf_var_data(connectivity_var, file_path)
+ indices_data = nc_loader._get_cf_var_data(connectivity_var, file_path)
cf_role = connectivity_var.cf_role
start_index = connectivity_var.start_index
@@ -462,7 +461,7 @@ def _build_mesh(cf, mesh_var, file_path):
)
mesh_elements = filter(None, mesh_elements)
for iris_object in mesh_elements:
- netcdf._add_unused_attributes(
+ nc_loader._add_unused_attributes(
iris_object, cf.cf_group[iris_object.var_name]
)
diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py
index 8a5934b939..3c42137905 100644
--- a/lib/iris/experimental/ugrid/save.py
+++ b/lib/iris/experimental/ugrid/save.py
@@ -8,8 +8,7 @@
Extensions to Iris' NetCDF saving to allow
:class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format.
-Eventual destination: :mod:`iris.fileformats.netcdf` (plan to split that module
-into ``load`` and ``save`` in future).
+Eventual destination: :mod:`iris.fileformats.netcdf`.
"""
from collections.abc import Iterable
diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py
index e94fe99185..0127c68059 100644
--- a/lib/iris/fileformats/_nc_load_rules/helpers.py
+++ b/lib/iris/fileformats/_nc_load_rules/helpers.py
@@ -31,9 +31,9 @@
import iris.fileformats.netcdf
from iris.fileformats.netcdf import (
UnknownCellMethodWarning,
- _get_cf_var_data,
parse_cell_methods,
)
+from iris.fileformats.netcdf.loader import _get_cf_var_data
import iris.std_names
import iris.util
diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py
new file mode 100644
index 0000000000..505e173b0b
--- /dev/null
+++ b/lib/iris/fileformats/netcdf/__init__.py
@@ -0,0 +1,49 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Module to support the loading and saving of NetCDF files, using the CF
+conventions for metadata interpretation.
+
+See : `NetCDF User's Guide <https://docs.unidata.ucar.edu/nug/current/>`_
+and `netCDF4 python module <https://unidata.github.io/netcdf4-python/>`_.
+
+Also : `CF Conventions <https://cfconventions.org/>`_.
+
+"""
+import iris.config
+
+# Note: *must* be done before importing from submodules, as they also use this!
+logger = iris.config.get_logger(__name__)
+
+from .loader import DEBUG, NetCDFDataProxy, load_cubes
+from .saver import (
+ CF_CONVENTIONS_VERSION,
+ MESH_ELEMENTS,
+ SPATIO_TEMPORAL_AXES,
+ CFNameCoordMap,
+ Saver,
+ UnknownCellMethodWarning,
+ parse_cell_methods,
+ save,
+)
+
+# Export all public elements from the loader and saver submodules.
+# NOTE: the separation is purely for neatness and developer convenience; from
+# the user point of view, it is still all one module.
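+#
+# For example, both of these continue to refer to the same function (a
+# sketch, not part of this patch):
+#
+#     from iris.fileformats.netcdf import load_cubes
+#     from iris.fileformats.netcdf.loader import load_cubes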
+__all__ = (
+ "CFNameCoordMap",
+ "CF_CONVENTIONS_VERSION",
+ "DEBUG",
+ "MESH_ELEMENTS",
+ "NetCDFDataProxy",
+ "SPATIO_TEMPORAL_AXES",
+ "Saver",
+ "UnknownCellMethodWarning",
+ "load_cubes",
+ "logger",
+ "parse_cell_methods",
+ "save",
+)
diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py
new file mode 100644
index 0000000000..95f394c70d
--- /dev/null
+++ b/lib/iris/fileformats/netcdf/loader.py
@@ -0,0 +1,594 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Module to support the loading of Iris cubes from NetCDF files, using the CF
+conventions for metadata interpretation.
+
+See : `NetCDF User's Guide <https://docs.unidata.ucar.edu/nug/current/>`_
+and `netCDF4 python module <https://unidata.github.io/netcdf4-python/>`_.
+
+Also : `CF Conventions <https://cfconventions.org/>`_.
+
+"""
+import warnings
+
+import netCDF4
+import numpy as np
+
+from iris._lazy_data import as_lazy_data
+from iris.aux_factory import (
+ AtmosphereSigmaFactory,
+ HybridHeightFactory,
+ HybridPressureFactory,
+ OceanSFactory,
+ OceanSg1Factory,
+ OceanSg2Factory,
+ OceanSigmaFactory,
+ OceanSigmaZFactory,
+)
+import iris.config
+import iris.coord_systems
+import iris.coords
+import iris.exceptions
+import iris.fileformats.cf
+from iris.fileformats.netcdf.saver import _CF_ATTRS
+import iris.io
+import iris.util
+
+# Show actions activation statistics.
+DEBUG = False
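+# (e.g. set "iris.fileformats.netcdf.loader.DEBUG = True" before loading to
+# print a per-variable summary of the rules triggered -- a debugging sketch,
+# see _actions_activation_stats below.)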
+
+# Get the logger : shared logger for all in 'iris.fileformats.netcdf'.
+from . import logger
+
+
+def _actions_engine():
+ # Return an 'actions engine', which provides a pyke-rules-like interface to
+ # the core cf translation code.
+ # Deferred import to avoid circularity.
+ import iris.fileformats._nc_load_rules.engine as nc_actions_engine
+
+ engine = nc_actions_engine.Engine()
+ return engine
+
+
+class NetCDFDataProxy:
+ """A reference to the data payload of a single NetCDF file variable."""
+
+ __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value")
+
+ def __init__(self, shape, dtype, path, variable_name, fill_value):
+ self.shape = shape
+ self.dtype = dtype
+ self.path = path
+ self.variable_name = variable_name
+ self.fill_value = fill_value
+
+ @property
+ def ndim(self):
+ return len(self.shape)
+
+ def __getitem__(self, keys):
+ dataset = netCDF4.Dataset(self.path)
+ try:
+ variable = dataset.variables[self.variable_name]
+ # Get the NetCDF variable data and slice.
+ var = variable[keys]
+ finally:
+ dataset.close()
+ return np.asanyarray(var)
+
+ def __repr__(self):
+ fmt = (
+ "<{self.__class__.__name__} shape={self.shape}"
+ " dtype={self.dtype!r} path={self.path!r}"
+ " variable_name={self.variable_name!r}>"
+ )
+ return fmt.format(self=self)
+
+ def __getstate__(self):
+ return {attr: getattr(self, attr) for attr in self.__slots__}
+
+ def __setstate__(self, state):
+ for key, value in state.items():
+ setattr(self, key, value)
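+
+    # A hypothetical usage sketch (names are illustrative only): the proxy
+    # holds only metadata until indexed, at which point __getitem__ opens
+    # the file, reads the slice and closes it again:
+    #
+    #     proxy = NetCDFDataProxy((240, 37, 49), np.dtype("f4"),
+    #                             "/data/example.nc", "air_temp", -1e30)
+    #     section = proxy[0, :, :]   # opens, slices, closes "example.nc"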
+
+
+def _assert_case_specific_facts(engine, cf, cf_group):
+ # Initialise a data store for built cube elements.
+ # This is used to patch element attributes *not* setup by the actions
+ # process, after the actions code has run.
+ engine.cube_parts["coordinates"] = []
+ engine.cube_parts["cell_measures"] = []
+ engine.cube_parts["ancillary_variables"] = []
+
+ # Assert facts for CF coordinates.
+ for cf_name in cf_group.coordinates.keys():
+ engine.add_case_specific_fact("coordinate", (cf_name,))
+
+ # Assert facts for CF auxiliary coordinates.
+ for cf_name in cf_group.auxiliary_coordinates.keys():
+ engine.add_case_specific_fact("auxiliary_coordinate", (cf_name,))
+
+ # Assert facts for CF cell measures.
+ for cf_name in cf_group.cell_measures.keys():
+ engine.add_case_specific_fact("cell_measure", (cf_name,))
+
+ # Assert facts for CF ancillary variables.
+ for cf_name in cf_group.ancillary_variables.keys():
+ engine.add_case_specific_fact("ancillary_variable", (cf_name,))
+
+ # Assert facts for CF grid_mappings.
+ for cf_name in cf_group.grid_mappings.keys():
+ engine.add_case_specific_fact("grid_mapping", (cf_name,))
+
+ # Assert facts for CF labels.
+ for cf_name in cf_group.labels.keys():
+ engine.add_case_specific_fact("label", (cf_name,))
+
+ # Assert facts for CF formula terms associated with the cf_group
+ # of the CF data variable.
+
+ # Collect varnames of formula-root variables as we go.
+ # NOTE: use dictionary keys as an 'OrderedSet'
+ # - see: https://stackoverflow.com/a/53657523/2615050
+ # This is to ensure that we can handle the resulting facts in a definite
+ # order, as using a 'set' led to indeterminate results.
+ formula_root = {}
+ for cf_var in cf.cf_group.formula_terms.values():
+ for cf_root, cf_term in cf_var.cf_terms_by_root.items():
+ # Only assert this fact if the formula root variable is
+ # defined in the CF group of the CF data variable.
+ if cf_root in cf_group:
+ formula_root[cf_root] = True
+ engine.add_case_specific_fact(
+ "formula_term",
+ (cf_var.cf_name, cf_root, cf_term),
+ )
+
+ for cf_root in formula_root.keys():
+ engine.add_case_specific_fact("formula_root", (cf_root,))
+
+
+def _actions_activation_stats(engine, cf_name):
+ print("-" * 80)
+ print("CF Data Variable: %r" % cf_name)
+
+ engine.print_stats()
+
+ print("Rules Triggered:")
+
+ for rule in sorted(list(engine.rules_triggered)):
+ print("\t%s" % rule)
+
+ print("Case Specific Facts:")
+ kb_facts = engine.get_kb()
+
+ for key in kb_facts.entity_lists.keys():
+ for arg in kb_facts.entity_lists[key].case_specific_facts:
+ print("\t%s%s" % (key, arg))
+
+
+def _set_attributes(attributes, key, value):
+ """Set attributes dictionary, converting unicode strings appropriately."""
+
+ if isinstance(value, str):
+ try:
+ attributes[str(key)] = str(value)
+ except UnicodeEncodeError:
+ attributes[str(key)] = value
+ else:
+ attributes[str(key)] = value
+
+
+def _add_unused_attributes(iris_object, cf_var):
+ """
+ Populate the attributes of a cf element with the "unused" attributes
+ from the associated CF-netCDF variable. That is, all those that aren't CF
+ reserved terms.
+
+ """
+
+ def attribute_predicate(item):
+ return item[0] not in _CF_ATTRS
+
+ tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused())
+ for attr_name, attr_value in tmpvar:
+ _set_attributes(iris_object.attributes, attr_name, attr_value)
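+
+
+# (An illustrative note on the _CF_ATTRS filtering above: a bespoke variable
+# attribute such as "experiment_id" is copied onto the Iris object, whereas
+# CF-handled terms like "units" or "standard_name" are skipped.)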
+
+
+def _get_actual_dtype(cf_var):
+ # Figure out what the eventual data type will be after any scale/offset
+ # transforms.
+ dummy_data = np.zeros(1, dtype=cf_var.dtype)
+ if hasattr(cf_var, "scale_factor"):
+ dummy_data = cf_var.scale_factor * dummy_data
+ if hasattr(cf_var, "add_offset"):
+ dummy_data = cf_var.add_offset + dummy_data
+ return dummy_data.dtype
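+
+
+# (e.g. an int16 variable packed with a float64 "scale_factor" unpacks to
+# float64, so that is the dtype the data proxy must advertise.)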
+
+
+def _get_cf_var_data(cf_var, filename):
+ # Get lazy chunked data out of a cf variable.
+ dtype = _get_actual_dtype(cf_var)
+
+    # Create a proxy to supply the deferred data (no metadata is set here).
+ fill_value = getattr(
+ cf_var.cf_data,
+ "_FillValue",
+ netCDF4.default_fillvals[cf_var.dtype.str[1:]],
+ )
+ proxy = NetCDFDataProxy(
+ cf_var.shape, dtype, filename, cf_var.cf_name, fill_value
+ )
+ # Get the chunking specified for the variable : this is either a shape, or
+ # maybe the string "contiguous".
+ chunks = cf_var.cf_data.chunking()
+ # In the "contiguous" case, pass chunks=None to 'as_lazy_data'.
+ if chunks == "contiguous":
+ chunks = None
+ return as_lazy_data(proxy, chunks=chunks)
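+
+
+# (An illustrative note: a variable stored with a netCDF chunk shape of, say,
+# (1, 91, 180) gives a dask array chunked on that basis, while "contiguous"
+# storage leaves the chunking choice to Iris' lazy-data defaults.)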
+
+
+class _OrderedAddableList(list):
+ """
+ A custom container object for actions recording.
+
+ Used purely in actions debugging, to accumulate a record of which actions
+ were activated.
+
+ It replaces a set, so as to preserve the ordering of operations, with
+ possible repeats, and it also numbers the entries.
+
+ The actions routines invoke an 'add' method, so this effectively replaces
+ a set.add with a list.append.
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._n_add = 0
+
+ def add(self, msg):
+ self._n_add += 1
+ n_add = self._n_add
+ self.append(f"#{n_add:03d} : {msg}")
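+
+    # e.g. successive calls add("rule_a"), add("rule_b") record the entries
+    # "#001 : rule_a" and "#002 : rule_b".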
+
+
+def _load_cube(engine, cf, cf_var, filename):
+    """Create the cube associated with the CF-netCDF data variable."""
+    # Deferred import to avoid circular imports.
+    from iris.cube import Cube
+
+ data = _get_cf_var_data(cf_var, filename)
+ cube = Cube(data)
+
+ # Reset the actions engine.
+ engine.reset()
+
+ # Initialise engine rule processing hooks.
+ engine.cf_var = cf_var
+ engine.cube = cube
+ engine.cube_parts = {}
+ engine.requires = {}
+ engine.rules_triggered = _OrderedAddableList()
+ engine.filename = filename
+
+ # Assert all the case-specific facts.
+ # This extracts 'facts' specific to this data-variable (aka cube), from
+ # the info supplied in the CFGroup object.
+ _assert_case_specific_facts(engine, cf, cf_var.cf_group)
+
+ # Run the actions engine.
+ # This creates various cube elements and attaches them to the cube.
+ # It also records various other info on the engine, to be processed later.
+ engine.activate()
+
+ # Having run the rules, now add the "unused" attributes to each cf element.
+ def fix_attributes_all_elements(role_name):
+ elements_and_names = engine.cube_parts.get(role_name, [])
+
+ for iris_object, cf_var_name in elements_and_names:
+ _add_unused_attributes(iris_object, cf.cf_group[cf_var_name])
+
+ # Populate the attributes of all coordinates, cell-measures and ancillary-vars.
+ fix_attributes_all_elements("coordinates")
+ fix_attributes_all_elements("ancillary_variables")
+ fix_attributes_all_elements("cell_measures")
+
+ # Also populate attributes of the top-level cube itself.
+ _add_unused_attributes(cube, cf_var)
+
+ # Work out reference names for all the coords.
+ names = {
+ coord.var_name: coord.standard_name or coord.var_name or "unknown"
+ for coord in cube.coords()
+ }
+
+ # Add all the cube cell methods.
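+    # (e.g. a method recorded against netCDF variable "t" is re-labelled with
+    # that coordinate's standard_name, such as "time"; names not in the map
+    # are passed through unchanged.)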
+ cube.cell_methods = [
+ iris.coords.CellMethod(
+ method=method.method,
+ intervals=method.intervals,
+ comments=method.comments,
+ coords=[
+ names[coord_name] if coord_name in names else coord_name
+ for coord_name in method.coord_names
+ ],
+ )
+ for method in cube.cell_methods
+ ]
+
+ if DEBUG:
+ # Show activation statistics for this data-var (i.e. cube).
+ _actions_activation_stats(engine, cf_var.cf_name)
+
+ return cube
+
+
+def _load_aux_factory(engine, cube):
+ """
+ Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.
+
+ """
+ formula_type = engine.requires.get("formula_type")
+ if formula_type in [
+ "atmosphere_sigma_coordinate",
+ "atmosphere_hybrid_height_coordinate",
+ "atmosphere_hybrid_sigma_pressure_coordinate",
+ "ocean_sigma_z_coordinate",
+ "ocean_sigma_coordinate",
+ "ocean_s_coordinate",
+ "ocean_s_coordinate_g1",
+ "ocean_s_coordinate_g2",
+ ]:
+
+ def coord_from_term(term):
+ # Convert term names to coordinates (via netCDF variable names).
+ name = engine.requires["formula_terms"].get(term, None)
+ if name is not None:
+ for coord, cf_var_name in engine.cube_parts["coordinates"]:
+ if cf_var_name == name:
+ return coord
+ warnings.warn(
+ "Unable to find coordinate for variable "
+ "{!r}".format(name)
+ )
+
+ if formula_type == "atmosphere_sigma_coordinate":
+ pressure_at_top = coord_from_term("ptop")
+ sigma = coord_from_term("sigma")
+ surface_air_pressure = coord_from_term("ps")
+ factory = AtmosphereSigmaFactory(
+ pressure_at_top, sigma, surface_air_pressure
+ )
+ elif formula_type == "atmosphere_hybrid_height_coordinate":
+ delta = coord_from_term("a")
+ sigma = coord_from_term("b")
+ orography = coord_from_term("orog")
+ factory = HybridHeightFactory(delta, sigma, orography)
+ elif formula_type == "atmosphere_hybrid_sigma_pressure_coordinate":
+ # Hybrid pressure has two valid versions of its formula terms:
+ # "p0: var1 a: var2 b: var3 ps: var4" or
+ # "ap: var1 b: var2 ps: var3" where "ap = p0 * a"
+ # Attempt to get the "ap" term.
+ delta = coord_from_term("ap")
+ if delta is None:
+            # The "ap" term is unavailable, so try getting the "p0" and "a"
+            # terms, in order to derive an "ap" equivalent term.
+ coord_p0 = coord_from_term("p0")
+ if coord_p0 is not None:
+ if coord_p0.shape != (1,):
+ msg = (
+ "Expecting {!r} to be a scalar reference "
+ "pressure coordinate, got shape {!r}".format(
+ coord_p0.var_name, coord_p0.shape
+ )
+ )
+ raise ValueError(msg)
+ if coord_p0.has_bounds():
+ msg = (
+ "Ignoring atmosphere hybrid sigma pressure "
+ "scalar coordinate {!r} bounds.".format(
+ coord_p0.name()
+ )
+ )
+ warnings.warn(msg)
+ coord_a = coord_from_term("a")
+ if coord_a is not None:
+ if coord_a.units.is_unknown():
+ # Be graceful, and promote unknown to dimensionless units.
+ coord_a.units = "1"
+ delta = coord_a * coord_p0.points[0]
+ delta.units = coord_a.units * coord_p0.units
+ delta.rename("vertical pressure")
+ delta.var_name = "ap"
+ cube.add_aux_coord(delta, cube.coord_dims(coord_a))
+
+ sigma = coord_from_term("b")
+ surface_air_pressure = coord_from_term("ps")
+ factory = HybridPressureFactory(delta, sigma, surface_air_pressure)
+ elif formula_type == "ocean_sigma_z_coordinate":
+ sigma = coord_from_term("sigma")
+ eta = coord_from_term("eta")
+ depth = coord_from_term("depth")
+ depth_c = coord_from_term("depth_c")
+ nsigma = coord_from_term("nsigma")
+ zlev = coord_from_term("zlev")
+ factory = OceanSigmaZFactory(
+ sigma, eta, depth, depth_c, nsigma, zlev
+ )
+ elif formula_type == "ocean_sigma_coordinate":
+ sigma = coord_from_term("sigma")
+ eta = coord_from_term("eta")
+ depth = coord_from_term("depth")
+ factory = OceanSigmaFactory(sigma, eta, depth)
+ elif formula_type == "ocean_s_coordinate":
+ s = coord_from_term("s")
+ eta = coord_from_term("eta")
+ depth = coord_from_term("depth")
+ a = coord_from_term("a")
+ depth_c = coord_from_term("depth_c")
+ b = coord_from_term("b")
+ factory = OceanSFactory(s, eta, depth, a, b, depth_c)
+ elif formula_type == "ocean_s_coordinate_g1":
+ s = coord_from_term("s")
+ c = coord_from_term("c")
+ eta = coord_from_term("eta")
+ depth = coord_from_term("depth")
+ depth_c = coord_from_term("depth_c")
+ factory = OceanSg1Factory(s, c, eta, depth, depth_c)
+ elif formula_type == "ocean_s_coordinate_g2":
+ s = coord_from_term("s")
+ c = coord_from_term("c")
+ eta = coord_from_term("eta")
+ depth = coord_from_term("depth")
+ depth_c = coord_from_term("depth_c")
+ factory = OceanSg2Factory(s, c, eta, depth, depth_c)
+ cube.add_aux_factory(factory)
+
+
+def _translate_constraints_to_var_callback(constraints):
+ """
+ Translate load constraints into a simple data-var filter function, if possible.
+
+ Returns:
+ * function(cf_var:CFDataVariable): --> bool,
+ or None.
+
+ For now, ONLY handles a single NameConstraint with no 'STASH' component.
+
+ """
+ import iris._constraints
+
+ constraints = iris._constraints.list_of_constraints(constraints)
+ result = None
+ if len(constraints) == 1:
+ (constraint,) = constraints
+ if (
+ isinstance(constraint, iris._constraints.NameConstraint)
+ and constraint.STASH == "none"
+ ):
+            # As long as it doesn't use a STASH match, we can treat it as a
+            # test against the name properties of the cf_var.
+            # That is just like testing against the name properties of a
+            # cube, except that they may not all exist.
+ def inner(cf_datavar):
+ match = True
+ for name in constraint._names:
+ expected = getattr(constraint, name)
+ if name != "STASH" and expected != "none":
+ attr_name = "cf_name" if name == "var_name" else name
+ # Fetch property : N.B. CFVariable caches the property values
+ # The use of a default here is the only difference from the code in NameConstraint.
+ if not hasattr(cf_datavar, attr_name):
+ continue
+ actual = getattr(cf_datavar, attr_name, "")
+ if actual != expected:
+ match = False
+ break
+ return match
+
+ result = inner
+ return result
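+
+
+# (For example -- an illustrative sketch: iris.NameConstraint(var_name="tas")
+# becomes a callback that keeps only data variables whose cf_name is "tas";
+# anything more complex, such as multiple constraints or a STASH component,
+# yields None, i.e. no pre-filtering.)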
+
+
+def load_cubes(filenames, callback=None, constraints=None):
+ """
+ Loads cubes from a list of NetCDF filenames/OPeNDAP URLs.
+
+ Args:
+
+ * filenames (string/list):
+ One or more NetCDF filenames/OPeNDAP URLs to load from.
+
+ Kwargs:
+
+ * callback (callable function):
+ Function which can be passed on to :func:`iris.io.run_callback`.
+
+ Returns:
+ Generator of loaded NetCDF :class:`iris.cube.Cube`.
+
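+    For example (a sketch; users normally reach this via :func:`iris.load`)::
+
+        cubes = list(load_cubes("file.nc"))
+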
+ """
+ # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded
+ # into standard behaviour.
+ # Deferred import to avoid circular imports.
+ from iris.experimental.ugrid.cf import CFUGridReader
+ from iris.experimental.ugrid.load import (
+ PARSE_UGRID_ON_LOAD,
+ _build_mesh_coords,
+ _meshes_from_cf,
+ )
+ from iris.io import run_callback
+
+ # Create a low-level data-var filter from the original load constraints, if they are suitable.
+ var_callback = _translate_constraints_to_var_callback(constraints)
+
+ # Create an actions engine.
+ engine = _actions_engine()
+
+ if isinstance(filenames, str):
+ filenames = [filenames]
+
+ for filename in filenames:
+ # Ingest the netCDF file.
+ meshes = {}
+ if PARSE_UGRID_ON_LOAD:
+ cf = CFUGridReader(filename)
+ meshes = _meshes_from_cf(cf)
+ else:
+ cf = iris.fileformats.cf.CFReader(filename)
+
+ # Process each CF data variable.
+ data_variables = list(cf.cf_group.data_variables.values()) + list(
+ cf.cf_group.promoted.values()
+ )
+ for cf_var in data_variables:
+ if var_callback and not var_callback(cf_var):
+ # Deliver only selected results.
+ continue
+
+ # cf_var-specific mesh handling, if a mesh is present.
+ # Build the mesh_coords *before* loading the cube - avoids
+ # mesh-related attributes being picked up by
+ # _add_unused_attributes().
+ mesh_name = None
+ mesh = None
+ mesh_coords, mesh_dim = [], None
+ if PARSE_UGRID_ON_LOAD:
+ mesh_name = getattr(cf_var, "mesh", None)
+ if mesh_name is not None:
+ try:
+ mesh = meshes[mesh_name]
+ except KeyError:
+ message = (
+ f"File does not contain mesh: '{mesh_name}' - "
+                            f"referenced by variable: '{cf_var.cf_name}'."
+ )
+ logger.debug(message)
+ if mesh is not None:
+ mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var)
+
+ cube = _load_cube(engine, cf, cf_var, filename)
+
+ # Attach the mesh (if present) to the cube.
+ for mesh_coord in mesh_coords:
+ cube.add_aux_coord(mesh_coord, mesh_dim)
+
+ # Process any associated formula terms and attach
+ # the corresponding AuxCoordFactory.
+ try:
+ _load_aux_factory(engine, cube)
+ except ValueError as e:
+ warnings.warn("{}".format(e))
+
+ # Perform any user registered callback function.
+ cube = run_callback(callback, cube, cf_var, filename)
+
+ # Callback mechanism may return None, which must not be yielded
+ if cube is None:
+ continue
+
+ yield cube
diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf/saver.py
similarity index 83%
rename from lib/iris/fileformats/netcdf.py
rename to lib/iris/fileformats/netcdf/saver.py
index 6a7b37a1cc..650c5e3338 100644
--- a/lib/iris/fileformats/netcdf.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -4,16 +4,16 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
-Module to support the loading of a NetCDF file into an Iris cube.
+Module to support the saving of Iris cubes to a NetCDF file, using the CF
+conventions for metadata interpretation.
-See also: `netCDF4 python `_
+See : `NetCDF User's Guide <https://docs.unidata.ucar.edu/nug/current/>`_
+and `netCDF4 python module <https://unidata.github.io/netcdf4-python/>`_.
-Also refer to document 'NetCDF Climate and Forecast (CF) Metadata Conventions'.
+Also : `CF Conventions <https://cfconventions.org/>`_.
"""
-
import collections
-import collections.abc
from itertools import repeat, zip_longest
import os
import os.path
@@ -28,7 +28,7 @@
import numpy as np
import numpy.ma as ma
-from iris._lazy_data import _co_realise_lazy_arrays, as_lazy_data, is_lazy_data
+from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data
from iris.aux_factory import (
AtmosphereSigmaFactory,
HybridHeightFactory,
@@ -48,28 +48,17 @@
import iris.io
import iris.util
-# Show actions activation statistics.
-DEBUG = False
+# Get the logger : shared logger for all in 'iris.fileformats.netcdf'.
+from . import logger
-# Configure the logger.
-logger = iris.config.get_logger(__name__)
+# Reference the name once, to avoid a warning about an unused import.
+# (We could use an __all__, but we don't want to maintain one here.)
+logger
# Standard CML spatio-temporal axis names.
SPATIO_TEMPORAL_AXES = ["t", "z", "y", "x"]
-# Pass through CF attributes:
-# - comment
-# - Conventions
-# - flag_masks
-# - flag_meanings
-# - flag_values
-# - history
-# - institution
-# - reference
-# - source
-# - title
-# - positive
-#
+# The CF-meaningful attributes which may appear on a data variable.
_CF_ATTRS = [
"add_offset",
"ancillary_variables",
@@ -447,555 +436,6 @@ def coord(self, name):
return result
-def _actions_engine():
- # Return an 'actions engine', which provides a pyke-rules-like interface to
- # the core cf translation code.
- # Deferred import to avoid circularity.
- import iris.fileformats._nc_load_rules.engine as nc_actions_engine
-
- engine = nc_actions_engine.Engine()
- return engine
-
-
-class NetCDFDataProxy:
- """A reference to the data payload of a single NetCDF file variable."""
-
- __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value")
-
- def __init__(self, shape, dtype, path, variable_name, fill_value):
- self.shape = shape
- self.dtype = dtype
- self.path = path
- self.variable_name = variable_name
- self.fill_value = fill_value
-
- @property
- def ndim(self):
- return len(self.shape)
-
- def __getitem__(self, keys):
- dataset = netCDF4.Dataset(self.path)
- try:
- variable = dataset.variables[self.variable_name]
- # Get the NetCDF variable data and slice.
- var = variable[keys]
- finally:
- dataset.close()
- return np.asanyarray(var)
-
- def __repr__(self):
- fmt = (
- "<{self.__class__.__name__} shape={self.shape}"
- " dtype={self.dtype!r} path={self.path!r}"
- " variable_name={self.variable_name!r}>"
- )
- return fmt.format(self=self)
-
- def __getstate__(self):
- return {attr: getattr(self, attr) for attr in self.__slots__}
-
- def __setstate__(self, state):
- for key, value in state.items():
- setattr(self, key, value)
-
-
-def _assert_case_specific_facts(engine, cf, cf_group):
- # Initialise a data store for built cube elements.
- # This is used to patch element attributes *not* setup by the actions
- # process, after the actions code has run.
- engine.cube_parts["coordinates"] = []
- engine.cube_parts["cell_measures"] = []
- engine.cube_parts["ancillary_variables"] = []
-
- # Assert facts for CF coordinates.
- for cf_name in cf_group.coordinates.keys():
- engine.add_case_specific_fact("coordinate", (cf_name,))
-
- # Assert facts for CF auxiliary coordinates.
- for cf_name in cf_group.auxiliary_coordinates.keys():
- engine.add_case_specific_fact("auxiliary_coordinate", (cf_name,))
-
- # Assert facts for CF cell measures.
- for cf_name in cf_group.cell_measures.keys():
- engine.add_case_specific_fact("cell_measure", (cf_name,))
-
- # Assert facts for CF ancillary variables.
- for cf_name in cf_group.ancillary_variables.keys():
- engine.add_case_specific_fact("ancillary_variable", (cf_name,))
-
- # Assert facts for CF grid_mappings.
- for cf_name in cf_group.grid_mappings.keys():
- engine.add_case_specific_fact("grid_mapping", (cf_name,))
-
- # Assert facts for CF labels.
- for cf_name in cf_group.labels.keys():
- engine.add_case_specific_fact("label", (cf_name,))
-
- # Assert facts for CF formula terms associated with the cf_group
- # of the CF data variable.
-
- # Collect varnames of formula-root variables as we go.
- # NOTE: use dictionary keys as an 'OrderedSet'
- # - see: https://stackoverflow.com/a/53657523/2615050
- # This is to ensure that we can handle the resulting facts in a definite
- # order, as using a 'set' led to indeterminate results.
- formula_root = {}
- for cf_var in cf.cf_group.formula_terms.values():
- for cf_root, cf_term in cf_var.cf_terms_by_root.items():
- # Only assert this fact if the formula root variable is
- # defined in the CF group of the CF data variable.
- if cf_root in cf_group:
- formula_root[cf_root] = True
- engine.add_case_specific_fact(
- "formula_term",
- (cf_var.cf_name, cf_root, cf_term),
- )
-
- for cf_root in formula_root.keys():
- engine.add_case_specific_fact("formula_root", (cf_root,))
-
-
-def _actions_activation_stats(engine, cf_name):
- print("-" * 80)
- print("CF Data Variable: %r" % cf_name)
-
- engine.print_stats()
-
- print("Rules Triggered:")
-
- for rule in sorted(list(engine.rules_triggered)):
- print("\t%s" % rule)
-
- print("Case Specific Facts:")
- kb_facts = engine.get_kb()
-
- for key in kb_facts.entity_lists.keys():
- for arg in kb_facts.entity_lists[key].case_specific_facts:
- print("\t%s%s" % (key, arg))
-
-
-def _set_attributes(attributes, key, value):
- """Set attributes dictionary, converting unicode strings appropriately."""
-
- if isinstance(value, str):
- try:
- attributes[str(key)] = str(value)
- except UnicodeEncodeError:
- attributes[str(key)] = value
- else:
- attributes[str(key)] = value
-
-
-def _add_unused_attributes(iris_object, cf_var):
- """
- Populate the attributes of a cf element with the "unused" attributes
- from the associated CF-netCDF variable. That is, all those that aren't CF
- reserved terms.
-
- """
-
- def attribute_predicate(item):
- return item[0] not in _CF_ATTRS
-
- tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused())
- for attr_name, attr_value in tmpvar:
- _set_attributes(iris_object.attributes, attr_name, attr_value)
-
-
-def _get_actual_dtype(cf_var):
- # Figure out what the eventual data type will be after any scale/offset
- # transforms.
- dummy_data = np.zeros(1, dtype=cf_var.dtype)
- if hasattr(cf_var, "scale_factor"):
- dummy_data = cf_var.scale_factor * dummy_data
- if hasattr(cf_var, "add_offset"):
- dummy_data = cf_var.add_offset + dummy_data
- return dummy_data.dtype
-
-
-def _get_cf_var_data(cf_var, filename):
- # Get lazy chunked data out of a cf variable.
- dtype = _get_actual_dtype(cf_var)
-
- # Create cube with deferred data, but no metadata
- fill_value = getattr(
- cf_var.cf_data,
- "_FillValue",
- netCDF4.default_fillvals[cf_var.dtype.str[1:]],
- )
- proxy = NetCDFDataProxy(
- cf_var.shape, dtype, filename, cf_var.cf_name, fill_value
- )
- # Get the chunking specified for the variable : this is either a shape, or
- # maybe the string "contiguous".
- chunks = cf_var.cf_data.chunking()
- # In the "contiguous" case, pass chunks=None to 'as_lazy_data'.
- if chunks == "contiguous":
- chunks = None
- return as_lazy_data(proxy, chunks=chunks)
-
-
-class _OrderedAddableList(list):
- """
- A custom container object for actions recording.
-
- Used purely in actions debugging, to accumulate a record of which actions
- were activated.
-
- It replaces a set, so as to preserve the ordering of operations, with
- possible repeats, and it also numbers the entries.
-
- The actions routines invoke an 'add' method, so this effectively replaces
- a set.add with a list.append.
-
- """
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self._n_add = 0
-
- def add(self, msg):
- self._n_add += 1
- n_add = self._n_add
- self.append(f"#{n_add:03d} : {msg}")
-
-
-def _load_cube(engine, cf, cf_var, filename):
- from iris.cube import Cube
-
- """Create the cube associated with the CF-netCDF data variable."""
- data = _get_cf_var_data(cf_var, filename)
- cube = Cube(data)
-
- # Reset the actions engine.
- engine.reset()
-
- # Initialise engine rule processing hooks.
- engine.cf_var = cf_var
- engine.cube = cube
- engine.cube_parts = {}
- engine.requires = {}
- engine.rules_triggered = _OrderedAddableList()
- engine.filename = filename
-
- # Assert all the case-specific facts.
- # This extracts 'facts' specific to this data-variable (aka cube), from
- # the info supplied in the CFGroup object.
- _assert_case_specific_facts(engine, cf, cf_var.cf_group)
-
- # Run the actions engine.
- # This creates various cube elements and attaches them to the cube.
- # It also records various other info on the engine, to be processed later.
- engine.activate()
-
- # Having run the rules, now add the "unused" attributes to each cf element.
- def fix_attributes_all_elements(role_name):
- elements_and_names = engine.cube_parts.get(role_name, [])
-
- for iris_object, cf_var_name in elements_and_names:
- _add_unused_attributes(iris_object, cf.cf_group[cf_var_name])
-
- # Populate the attributes of all coordinates, cell-measures and ancillary-vars.
- fix_attributes_all_elements("coordinates")
- fix_attributes_all_elements("ancillary_variables")
- fix_attributes_all_elements("cell_measures")
-
- # Also populate attributes of the top-level cube itself.
- _add_unused_attributes(cube, cf_var)
-
- # Work out reference names for all the coords.
- names = {
- coord.var_name: coord.standard_name or coord.var_name or "unknown"
- for coord in cube.coords()
- }
-
- # Add all the cube cell methods.
- cube.cell_methods = [
- iris.coords.CellMethod(
- method=method.method,
- intervals=method.intervals,
- comments=method.comments,
- coords=[
- names[coord_name] if coord_name in names else coord_name
- for coord_name in method.coord_names
- ],
- )
- for method in cube.cell_methods
- ]
-
- if DEBUG:
- # Show activation statistics for this data-var (i.e. cube).
- _actions_activation_stats(engine, cf_var.cf_name)
-
- return cube
-
-
-def _load_aux_factory(engine, cube):
- """
- Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.
-
- """
- formula_type = engine.requires.get("formula_type")
- if formula_type in [
- "atmosphere_sigma_coordinate",
- "atmosphere_hybrid_height_coordinate",
- "atmosphere_hybrid_sigma_pressure_coordinate",
- "ocean_sigma_z_coordinate",
- "ocean_sigma_coordinate",
- "ocean_s_coordinate",
- "ocean_s_coordinate_g1",
- "ocean_s_coordinate_g2",
- ]:
-
- def coord_from_term(term):
- # Convert term names to coordinates (via netCDF variable names).
- name = engine.requires["formula_terms"].get(term, None)
- if name is not None:
- for coord, cf_var_name in engine.cube_parts["coordinates"]:
- if cf_var_name == name:
- return coord
- warnings.warn(
- "Unable to find coordinate for variable "
- "{!r}".format(name)
- )
-
- if formula_type == "atmosphere_sigma_coordinate":
- pressure_at_top = coord_from_term("ptop")
- sigma = coord_from_term("sigma")
- surface_air_pressure = coord_from_term("ps")
- factory = AtmosphereSigmaFactory(
- pressure_at_top, sigma, surface_air_pressure
- )
- elif formula_type == "atmosphere_hybrid_height_coordinate":
- delta = coord_from_term("a")
- sigma = coord_from_term("b")
- orography = coord_from_term("orog")
- factory = HybridHeightFactory(delta, sigma, orography)
- elif formula_type == "atmosphere_hybrid_sigma_pressure_coordinate":
- # Hybrid pressure has two valid versions of its formula terms:
- # "p0: var1 a: var2 b: var3 ps: var4" or
- # "ap: var1 b: var2 ps: var3" where "ap = p0 * a"
- # Attempt to get the "ap" term.
- delta = coord_from_term("ap")
- if delta is None:
- # The "ap" term is unavailable, so try getting terms "p0"
- # and "a" terms in order to derive an "ap" equivalent term.
- coord_p0 = coord_from_term("p0")
- if coord_p0 is not None:
- if coord_p0.shape != (1,):
- msg = (
- "Expecting {!r} to be a scalar reference "
- "pressure coordinate, got shape {!r}".format(
- coord_p0.var_name, coord_p0.shape
- )
- )
- raise ValueError(msg)
- if coord_p0.has_bounds():
- msg = (
- "Ignoring atmosphere hybrid sigma pressure "
- "scalar coordinate {!r} bounds.".format(
- coord_p0.name()
- )
- )
- warnings.warn(msg)
- coord_a = coord_from_term("a")
- if coord_a is not None:
- if coord_a.units.is_unknown():
- # Be graceful, and promote unknown to dimensionless units.
- coord_a.units = "1"
- delta = coord_a * coord_p0.points[0]
- delta.units = coord_a.units * coord_p0.units
- delta.rename("vertical pressure")
- delta.var_name = "ap"
- cube.add_aux_coord(delta, cube.coord_dims(coord_a))
-
- sigma = coord_from_term("b")
- surface_air_pressure = coord_from_term("ps")
- factory = HybridPressureFactory(delta, sigma, surface_air_pressure)
- elif formula_type == "ocean_sigma_z_coordinate":
- sigma = coord_from_term("sigma")
- eta = coord_from_term("eta")
- depth = coord_from_term("depth")
- depth_c = coord_from_term("depth_c")
- nsigma = coord_from_term("nsigma")
- zlev = coord_from_term("zlev")
- factory = OceanSigmaZFactory(
- sigma, eta, depth, depth_c, nsigma, zlev
- )
- elif formula_type == "ocean_sigma_coordinate":
- sigma = coord_from_term("sigma")
- eta = coord_from_term("eta")
- depth = coord_from_term("depth")
- factory = OceanSigmaFactory(sigma, eta, depth)
- elif formula_type == "ocean_s_coordinate":
- s = coord_from_term("s")
- eta = coord_from_term("eta")
- depth = coord_from_term("depth")
- a = coord_from_term("a")
- depth_c = coord_from_term("depth_c")
- b = coord_from_term("b")
- factory = OceanSFactory(s, eta, depth, a, b, depth_c)
- elif formula_type == "ocean_s_coordinate_g1":
- s = coord_from_term("s")
- c = coord_from_term("c")
- eta = coord_from_term("eta")
- depth = coord_from_term("depth")
- depth_c = coord_from_term("depth_c")
- factory = OceanSg1Factory(s, c, eta, depth, depth_c)
- elif formula_type == "ocean_s_coordinate_g2":
- s = coord_from_term("s")
- c = coord_from_term("c")
- eta = coord_from_term("eta")
- depth = coord_from_term("depth")
- depth_c = coord_from_term("depth_c")
- factory = OceanSg2Factory(s, c, eta, depth, depth_c)
- cube.add_aux_factory(factory)
-
-
-def _translate_constraints_to_var_callback(constraints):
- """
- Translate load constraints into a simple data-var filter function, if possible.
-
- Returns:
- * function(cf_var:CFDataVariable): --> bool,
- or None.
-
- For now, ONLY handles a single NameConstraint with no 'STASH' component.
-
- """
- import iris._constraints
-
- constraints = iris._constraints.list_of_constraints(constraints)
- result = None
- if len(constraints) == 1:
- (constraint,) = constraints
- if (
- isinstance(constraint, iris._constraints.NameConstraint)
- and constraint.STASH == "none"
- ):
- # As long as it doesn't use a STASH match, then we can treat it as
- # a testing against name properties of cf_var.
- # That's just like testing against name properties of a cube, except that they may not all exist.
- def inner(cf_datavar):
- match = True
- for name in constraint._names:
- expected = getattr(constraint, name)
- if name != "STASH" and expected != "none":
- attr_name = "cf_name" if name == "var_name" else name
- # Fetch property : N.B. CFVariable caches the property values
- # The use of a default here is the only difference from the code in NameConstraint.
- if not hasattr(cf_datavar, attr_name):
- continue
- actual = getattr(cf_datavar, attr_name, "")
- if actual != expected:
- match = False
- break
- return match
-
- result = inner
- return result
-
-
-def load_cubes(filenames, callback=None, constraints=None):
- """
- Loads cubes from a list of NetCDF filenames/OPeNDAP URLs.
-
- Args:
-
- * filenames (string/list):
- One or more NetCDF filenames/OPeNDAP URLs to load from.
-
- Kwargs:
-
- * callback (callable function):
- Function which can be passed on to :func:`iris.io.run_callback`.
-
- Returns:
- Generator of loaded NetCDF :class:`iris.cube.Cube`.
-
- """
- # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded
- # into standard behaviour.
- # Deferred import to avoid circular imports.
- from iris.experimental.ugrid.cf import CFUGridReader
- from iris.experimental.ugrid.load import (
- PARSE_UGRID_ON_LOAD,
- _build_mesh_coords,
- _meshes_from_cf,
- )
- from iris.io import run_callback
-
- # Create a low-level data-var filter from the original load constraints, if they are suitable.
- var_callback = _translate_constraints_to_var_callback(constraints)
-
- # Create an actions engine.
- engine = _actions_engine()
-
- if isinstance(filenames, str):
- filenames = [filenames]
-
- for filename in filenames:
- # Ingest the netCDF file.
- meshes = {}
- if PARSE_UGRID_ON_LOAD:
- cf = CFUGridReader(filename)
- meshes = _meshes_from_cf(cf)
- else:
- cf = iris.fileformats.cf.CFReader(filename)
-
- # Process each CF data variable.
- data_variables = list(cf.cf_group.data_variables.values()) + list(
- cf.cf_group.promoted.values()
- )
- for cf_var in data_variables:
- if var_callback and not var_callback(cf_var):
- # Deliver only selected results.
- continue
-
- # cf_var-specific mesh handling, if a mesh is present.
- # Build the mesh_coords *before* loading the cube - avoids
- # mesh-related attributes being picked up by
- # _add_unused_attributes().
- mesh_name = None
- mesh = None
- mesh_coords, mesh_dim = [], None
- if PARSE_UGRID_ON_LOAD:
- mesh_name = getattr(cf_var, "mesh", None)
- if mesh_name is not None:
- try:
- mesh = meshes[mesh_name]
- except KeyError:
- message = (
- f"File does not contain mesh: '{mesh_name}' - "
- f"referenced by variable: '{cf_var.cf_name}' ."
- )
- logger.debug(message)
- if mesh is not None:
- mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var)
-
- cube = _load_cube(engine, cf, cf_var, filename)
-
- # Attach the mesh (if present) to the cube.
- for mesh_coord in mesh_coords:
- cube.add_aux_coord(mesh_coord, mesh_dim)
-
- # Process any associated formula terms and attach
- # the corresponding AuxCoordFactory.
- try:
- _load_aux_factory(engine, cube)
- except ValueError as e:
- warnings.warn("{}".format(e))
-
- # Perform any user registered callback function.
- cube = run_callback(callback, cube, cf_var, filename)
-
- # Callback mechanism may return None, which must not be yielded
- if cube is None:
- continue
-
- yield cube
-
-
def _bytes_if_ascii(string):
"""
Convert the given string to a byte string (str in py2k, bytes in py3k)
@@ -1837,7 +1277,9 @@ def _get_dim_names(self, cube_or_mesh):
"""
- def record_dimension(names_list, dim_name, length, matching_coords=[]):
+ def record_dimension(
+ names_list, dim_name, length, matching_coords=None
+ ):
"""
Record a file dimension, its length and associated "coordinates"
(which may in fact also be connectivities).
@@ -1846,6 +1288,8 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]):
matches the earlier finding.
"""
+ if matching_coords is None:
+ matching_coords = []
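+            # (A mutable "[]" default is created once and shared across
+            # calls, so items appended in one call would leak into later
+            # calls; the None-then-assign idiom gives each call a fresh
+            # list.)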
if dim_name not in self._existing_dim:
self._existing_dim[dim_name] = length
else:
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py
index 7bcb451d95..7696c8e060 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py
@@ -15,7 +15,7 @@
import iris.fileformats._nc_load_rules.engine
from iris.fileformats.cf import CFReader
import iris.fileformats.netcdf
-from iris.fileformats.netcdf import _load_cube
+from iris.fileformats.netcdf.loader import _load_cube
from iris.tests.stock.netcdf import ncgen_from_cdl
"""
@@ -83,11 +83,11 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path):
# Grab a data variable : FOR NOW always grab the 'phenom' variable.
cf_var = cf.cf_group.data_variables["phenom"]
- engine = iris.fileformats.netcdf._actions_engine()
+ engine = iris.fileformats.netcdf.loader._actions_engine()
# If debug enabled, switch on the activation summary debug output.
# Use 'patch' so it is restored after the test.
- self.patch("iris.fileformats.netcdf.DEBUG", self.debug)
+ self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug)
with warnings.catch_warnings():
warnings.filterwarnings(
diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py
new file mode 100644
index 0000000000..7c2ae96158
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py
@@ -0,0 +1,6 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for the :mod:`iris.fileformats.netcdf.loader` module."""
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py
similarity index 97%
rename from lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py
rename to lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py
index 1bf39591d2..054c8e2db1 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py
@@ -16,7 +16,7 @@
from iris._lazy_data import _optimum_chunksize
import iris.fileformats.cf
-from iris.fileformats.netcdf import _get_cf_var_data
+from iris.fileformats.netcdf.loader import _get_cf_var_data
class Test__get_cf_var_data(tests.IrisTest):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py
similarity index 99%
rename from lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py
rename to lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py
index eb9da6b5d6..841935cc81 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py
@@ -16,7 +16,7 @@
from iris.coords import DimCoord
from iris.cube import Cube
-from iris.fileformats.netcdf import _load_aux_factory
+from iris.fileformats.netcdf.loader import _load_aux_factory
class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py
similarity index 96%
rename from lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py
rename to lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py
index 0e98eec916..6e28a2f8e4 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py
@@ -15,7 +15,7 @@
from iris.coords import DimCoord
import iris.fileformats.cf
-from iris.fileformats.netcdf import _load_cube
+from iris.fileformats.netcdf.loader import _load_cube
class TestCoordAttributes(tests.IrisTest):
@@ -28,7 +28,7 @@ def _patcher(engine, cf, cf_group):
engine.cube_parts["coordinates"] = coordinates
def setUp(self):
- this = "iris.fileformats.netcdf._assert_case_specific_facts"
+ this = "iris.fileformats.netcdf.loader._assert_case_specific_facts"
patch = mock.patch(this, side_effect=self._patcher)
patch.start()
self.addCleanup(patch.stop)
@@ -112,7 +112,7 @@ def test_flag_pass_thru_multi(self):
class TestCubeAttributes(tests.IrisTest):
def setUp(self):
- this = "iris.fileformats.netcdf._assert_case_specific_facts"
+ this = "iris.fileformats.netcdf.loader._assert_case_specific_facts"
patch = mock.patch(this)
patch.start()
self.addCleanup(patch.stop)
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py
similarity index 97%
rename from lib/iris/tests/unit/fileformats/netcdf/test__translate_constraints_to_var_callback.py
rename to lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py
index fb08ffda2b..77bb0d3950 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test__translate_constraints_to_var_callback.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py
@@ -13,7 +13,9 @@
import iris
from iris.fileformats.cf import CFDataVariable
-from iris.fileformats.netcdf import _translate_constraints_to_var_callback
+from iris.fileformats.netcdf.loader import (
+ _translate_constraints_to_var_callback,
+)
# import iris tests first so that some things can be initialised before
# importing anything else
diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py
new file mode 100644
index 0000000000..a68d5fc5d0
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py
@@ -0,0 +1,6 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Unit tests for the :mod:`iris.fileformats.netcdf.saver` module."""
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__FillValueMaskCheckAndStoreTarget.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py
similarity index 97%
rename from lib/iris/tests/unit/fileformats/netcdf/test__FillValueMaskCheckAndStoreTarget.py
rename to lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py
index 01ba7ff38d..77209efafc 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test__FillValueMaskCheckAndStoreTarget.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py
@@ -17,7 +17,7 @@
import numpy as np
-from iris.fileformats.netcdf import _FillValueMaskCheckAndStoreTarget
+from iris.fileformats.netcdf.saver import _FillValueMaskCheckAndStoreTarget
class Test__FillValueMaskCheckAndStoreTarget(tests.IrisTest):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py
index 61b37fe477..a942680c81 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py
@@ -189,7 +189,7 @@ def test_big_endian(self):
def test_zlib(self):
cube = self._simple_cube(">f4")
- api = self.patch("iris.fileformats.netcdf.netCDF4")
+ api = self.patch("iris.fileformats.netcdf.saver.netCDF4")
# Define mocked default fill values to prevent deprecation warning (#4374).
api.default_fillvals = collections.defaultdict(lambda: -99.0)
with Saver("/dummy/path", "NETCDF4") as saver:
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/test_save.py
index 669a3c4137..030edbfce2 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test_save.py
@@ -143,7 +143,7 @@ def test_None(self):
# Test that when no fill_value argument is passed, the fill_value
# argument to Saver.write is None or not present.
cubes = self._make_cubes()
- with mock.patch("iris.fileformats.netcdf.Saver") as Saver:
+ with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver:
save(cubes, "dummy.nc")
# Get the Saver.write mock
@@ -161,7 +161,7 @@ def test_single(self):
# that value is passed to each call to Saver.write
cubes = self._make_cubes()
fill_value = 12345.0
- with mock.patch("iris.fileformats.netcdf.Saver") as Saver:
+ with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver:
save(cubes, "dummy.nc", fill_value=fill_value)
# Get the Saver.write mock
@@ -178,7 +178,7 @@ def test_multiple(self):
# each element is passed to separate calls to Saver.write
cubes = self._make_cubes()
fill_values = [123.0, 456.0, 789.0]
- with mock.patch("iris.fileformats.netcdf.Saver") as Saver:
+ with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver:
save(cubes, "dummy.nc", fill_value=fill_values)
# Get the Saver.write mock
@@ -195,7 +195,7 @@ def test_single_string(self):
# that value is passed to calls to Saver.write
cube = Cube(["abc", "def", "hij"])
fill_value = "xyz"
- with mock.patch("iris.fileformats.netcdf.Saver") as Saver:
+ with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver:
save(cube, "dummy.nc", fill_value=fill_value)
# Get the Saver.write mock
@@ -211,7 +211,7 @@ def test_multi_wrong_length(self):
# is passed as the fill_value argument, an error is raised
cubes = self._make_cubes()
fill_values = [1.0, 2.0, 3.0, 4.0]
- with mock.patch("iris.fileformats.netcdf.Saver"):
+ with mock.patch("iris.fileformats.netcdf.saver.Saver"):
with self.assertRaises(ValueError):
save(cubes, "dummy.nc", fill_value=fill_values)