From 1f918efafcee5c612c081af965902671f9bf671a Mon Sep 17 00:00:00 2001 From: Bill Little Date: Fri, 12 Feb 2021 16:16:14 +0000 Subject: [PATCH 01/22] add ugrid mesh-api stubs (#4001) --- lib/iris/experimental/ugrid.py | 431 +++++++++++++++++++++++++++++++++ 1 file changed, 431 insertions(+) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index e5420b6041..d2907bf499 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -615,6 +615,437 @@ def equal(self, other, lenient=None): return super().equal(other, lenient=lenient) +# class Mesh(CFVariableMixin): +# """ +# +# .. todo:: +# +# .. questions:: +# +# - decide on the verbose/succinct version of __str__ vs __repr__ +# +# .. notes:: +# +# - the mesh is location agnostic +# +# - no need to support volume at mesh level, yet +# +# - topology_dimension +# - use for fast equality between Mesh instances +# - checking connectivity dimensionality, specifically the highest dimensonality of the +# "geometric element" being added i.e., reference the src_location/tgt_location +# - used to honour and enforce the minimum UGRID connectivity contract +# +# - support pickling +# +# - copy is off the table!! +# +# - MeshCoord.guess_points() +# +# - don't provide public methods to return the coordinate and connectivity +# managers +# +# """ +# def __init__( +# self, +# topology_dimension, +# standard_name=None, +# long_name=None, +# var_name=None, +# units=None, +# attributes=None, +# node_dimension=None, +# edge_dimension=None, +# face_dimension=None, +# node_coords_and_axes=None, # [(coord, "x"), (coord, "y")] this is a stronger contract, not relying on guessing +# edge_coords_and_axes=None, # ditto +# face_coords_and_axes=None, # ditto +# connectivities=None, # [Connectivity, [Connectivity], ...] +# ): +# # TODO: support volumes. +# # TODO: support (coord, "z") +# +# # These are strings, if None is provided then assign the default string. +# self.node_dimension = node_dimension +# self.edge_dimension = edge_dimension +# self.face_dimension = face_dimension +# +# self._metadata_manager = metadata_manager_factory(MeshMetadata) +# +# self._metadata_manager.topology_dimension = topology_dimension +# +# self.standard_name = standard_name +# self.long_name = long_name +# self.var_name = var_name +# self.units = units +# self.attributes = attributes +# +# # based on the topology_dimension create the appropriate coordinate manager +# # with some intelligence +# self._coord_manager = ... +# +# # based on the topology_dimension create the appropriate connectivity manager +# # with some intelligence +# self._connectivity_manager = ... +# +# @property +# def all_coords(self): +# # coords = mesh.all_coords +# # coords.face_x, coords.edge_y +# pass +# +# @property +# def node_coords(self): +# # perhaps return a namedtuple? 
+# # this would give: +# # node_coords = mesh.node_coords +# # node_coords.x +# # node_coords.y +# pass +# +# @property +# def edge_coords(self): +# # as above +# pass +# +# @property +# def face_coords(self): +# # as above +# pass +# +# @property +# def all_connectivities(self): +# # conns = mesh.all_connectivities +# # conns.edge_node, conns.boundary_node +# pass +# +# @property +# def face_node_connectivity(self): +# # required +# return self._connectivity_manager.face_node +# +# @property +# def edge_node_connectivity(self): +# # optionally required +# return self._connectivity_manager.edge_node +# +# @property +# def face_edge_connectivity(self): +# # optional +# return self._connectivity_manager.face_edge +# +# @property +# def face_face_connectivity(self): +# # optional +# return self._connectivity_manager.face_face +# +# @property +# def edge_face_connectivity(self): +# # optional +# return self._connectivity_manager.edge_face +# +# @property +# def boundary_node_connectivity(self): +# # optional +# return self._connectivity_manager.boundard_node +# +# def coord(self, ...): +# # as Cube.coord i.e., ensure that one and only one coord-like is returned +# # otherwise raise and exception +# pass +# +# def coords( +# self, +# name_or_coord=None, +# standard_name=None, +# long_name=None, +# var_name=None, +# attributes=None, +# axis=None, +# node=False, +# edge=False, +# face=False, +# ): +# # do we support the coord_system kwargs? +# self._coord_manager.coords(...) +# +# def connectivity(self, ...): +# pass +# +# def connectivities( +# self, +# name_or_coord=None, +# standard_name=None, +# long_name=None, +# var_name=None, +# attributes=None, +# node=False, +# edge=False, +# face=False, +# ): +# pass +# +# def add_coords(self, node_x=None, node_y=None, edge_x=None, edge_y=None, face_x=None, face_y=None): +# # this supports add a new coord to the manager, but also replacing an exiting coord +# self._coord_manager.add(...) +# +# def add_connectivities(self, *args): +# # this supports add a new connectivity to the manager, but also replacing an exiting connectivity +# self._connectivity_manager.add(*args) +# +# def remove_coords(self, ...): +# # could prove the "name", "metadata", "coord"-instance +# # this could use mesh.coords() to find the coords +# self._coord_manager.remove(...) +# +# def remove_connectivities(self, ...): +# # needs to respect the minimum UGRID contract +# self._connectivity_manager.remove(...) +# +# def __eq__(self, other): +# # Full equality could be MASSIVE, so we want to avoid that. +# # Ideally we want a mesh signature from LFRic for comparison, although this would +# # limit Iris' relevance outside MO. +# # TL;DR: unknown quantity. +# raise NotImplemented +# +# def __ne__(self, other): +# # See __eq__ +# raise NotImplemented +# +# def __str__(self): +# pass +# +# def __repr__(self): +# pass +# +# def __unicode__(self, ...): +# pass +# +# def xml_element(self): +# pass +# +# # the MeshCoord will always have bounds, perhaps points. However the MeshCoord.guess_points() may +# # be a very useful part of its behaviour. +# # after using MeshCoord.guess_points(), the user may wish to add the associated MeshCoord.points into +# # the Mesh as face_coordinates. +# +# def to_MeshCoord(self, location, axis): +# # return MeshCoord(..., location=location, axis=axis) +# # use Connectivity.indices_by_src() for fetching indices. 
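# ---------------------------------------------------------------------------
# Editorial sketch, not part of the patch: one way the note above, "use
# Connectivity.indices_by_src() for fetching indices", could play out when
# gathering the bounds of a face coordinate from the node coordinates.  The
# array names and shapes below are assumptions chosen purely for illustration;
# the real MeshCoord machinery is still to be written.
import numpy as np

node_x = np.array([0.0, 1.0, 1.0, 0.0, 2.0])  # x coordinate of every node
face_node = np.array(
    [[0, 1, 2, 3], [1, 4, 2, 3]]
)  # each row: the node indices that make up one face
# Fancy indexing gathers one row of bounds per face, i.e. the values that a
# MeshCoord(..., location="face", axis="x") would carry as its bounds.
face_x_bounds = node_x[face_node]
assert face_x_bounds.shape == (2, 4)
# ---------------------------------------------------------------------------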
+# +# def to_MeshCoords(self, location): +# # return MeshCoord(..., location=location, axis="x"), MeshCoord(..., location=location, axis="y") +# # use Connectivity.indices_by_src() for fetching indices. +# +# def dimension_names_reset(self, node=False, face=False, edge=False): +# # reset to defaults like this (suggestion) +# +# def dimension_names(self, node=None, face=None, edge=None): +# # e.g., only set self.node iff node != None. these attributes will +# # always be set to a user provided string or the default string. +# # return a namedtuple of dict-like +# +# @property +# def cf_role(self): +# return "mesh_topology" +# +# @property +# def topology_dimension(self): +# """ +# read-only +# +# """ +# return self._metadata_manager.topology_dimension +# +# +# class MeshMetadata(BaseMetadata): +# """ +# .. notes:: +# +# - topology_dimension is treated strictly in both +# strict and lenient modes, and does participate in __eq__ +# """ +# _members = "topology_dimension" +# +# +# # +# # - validate coord_systems +# # - validate climatological +# # - use guess_coord_axis (iris.utils) +# # - others? +# # +# class _Mesh1DCoordinateManager: +# REQUIRED = ( +# "node_x", +# "node_y", +# ) +# OPTIONAL = ( +# "edge_x", +# "edge_y", +# ) +# def __init__(self, node_x, node_y, edge_x=None, edge_y=None): +# # required +# self.node_x = node_x +# self.node_y = node_y +# # optional +# self.edge_x = edge_x +# self.edge_y = edge_y +# +# # WOO-GA - this can easily get out of sync with the self attributes. +# # choose the container wisely e.g., could be an dict..., also the self +# # attributes may need to be @property's that access the chosen _members container +# self._members = [ ... ] +# +# def __iter__(self): +# for member in self._members: +# yield member +# +# def coord(self, **kwargs): +# # see Cube.coord for pattern, checking for a single result +# return self.coords(**kwargs)[0] +# +# def coords(self, ...): +# # see Cube.coords for relevant patterns +# # return [ ... ] +# pass +# +# def add(self, **kwargs): +# pass +# +# def remove(self, ...): +# # needs to respect the minimum UGRID contract +# # use logging/warning to flag items not removed - highlight in doc-string +# # don't raise an exception +# +# def __str__(self): +# pass +# +# def __repr__(self): +# pass +# +# def __eq__(self, other): +# # Full equality could be MASSIVE, so we want to avoid that. +# # Ideally we want a mesh signature from LFRic for comparison, although this would +# # limit Iris' relevance outside MO. +# # TL;DR: unknown quantity. +# raise NotImplemented +# +# def __ne__(self, other): +# # See __eq__ +# raise NotImplemented +# +# +# class _Mesh2DCoordinateManager(_Mesh1DCoordinateManager): +# OPTIONAL = ( +# "edge_x", +# "edge_y", +# "face_x", +# "face_y", +# ) +# def __init__(self, node_x, node_y, edge_x=None, edge_y=None, face_x=None, face_y=None): +# # optional +# self.face_x = face_x +# self.face_y = face_y +# +# super().__init__(node_x, node_y, edge_x=edge_x, edge_y=edge_y) +# +# # does the order matter? +# self._members.extend([self.face_x, self.face_y]) +# +# +# # keep an eye on the __init__ inheritance +# class _Mesh1DConnectivityManager: +# REQUIRED = ( +# "edge_node", +# ) +# OPTIONAL = () +# def __init__(self, edge_node): +# # required +# self.edge_node = edge_node +# +# # WOO-GA - this can easily get out of sync with the self attributes. +# # choose the container wisely e.g., could be an dict..., also the self +# # attributes may need to be @property's that access the chosen _members container +# +# # is this a list? 
as dict? a namedtuple? use case is self.add() +# self._members = [] +# +# if self.edge_node is not None: +# self._members.append(self.edge_node) +# +# def __iter__(self): +# for member in self._members: +# yield member +# +# def connectivity(self, **kwargs): +# # see Cube.coord for pattern, checking for a single result +# return self.connectivities(**kwargs)[0] +# +# def connectivities(self, ...): +# # see Cube.coords for relevant patterns +# # return [ ... ] +# pass +# +# def add(self, *args): +# # loop thru args and add (clobber) +# # adopt same philosophy as remove for adding connectivites with unsupported cf-role +# pass +# +# def remove(self, ...): +# # needs to respect the minimum UGRID contract +# # use logging/warning to flag items not removed - highlight in doc-string +# # don't raise an exception +# +# def __str__(self): +# pass +# +# def __repr__(self): +# pass +# +# def __eq__(self, other): +# # Full equality could be MASSIVE, so we want to avoid that. +# # Ideally we want a mesh signature from LFRic for comparison, although this would +# # limit Iris' relevance outside MO. +# # TL;DR: unknown quantity. +# raise NotImplemented +# +# def __ne__(self, other): +# # See __eq__ +# raise NotImplemented +# +# +# class _Mesh2DConnectivityManager(_Mesh1DConnectivityManager): +# REQUIRED = ( +# "face_node", +# ) +# OPTIONAL = ( +# "edge_node", +# "face_edge", +# "face_face", +# "edge_face", +# "boundary_node", +# ) +# def __init__(self, face_node, edge_node=None, face_edge=None, face_face=None, edge_face=None, boundary_node=None): +# # required +# self.face_node = face_node +# self._members = [self.face_node] +# +# # optionally required +# self.edge_node = edge_node +# # optional +# self.face_edge = face_edge +# self.face_face = face_face +# self.edge_face = edge_face +# self.boundary_node = boundary_node +# +# # edge_node could be None here. are we okay with this pattern? +# super().__init__(edge_node) +# +# # does order matter? +# self._members.extend([member for member in self.OPTIONAL if member is not None and member != "edge_node"]) + + #: Convenience collection of lenient metadata combine services. SERVICES_COMBINE.append(ConnectivityMetadata.combine) SERVICES.append(ConnectivityMetadata.combine) From 36a93b09daeca263ba0b57acc90cc81d0578f741 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 15 Feb 2021 12:12:26 +0000 Subject: [PATCH 02/22] add additional mesh stubs (#4005) --- lib/iris/experimental/ugrid.py | 40 +++++++++++++++++++++++++++++----- 1 file changed, 35 insertions(+), 5 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index d2907bf499..994671c8b9 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -641,6 +641,7 @@ def equal(self, other, lenient=None): # - copy is off the table!! # # - MeshCoord.guess_points() +# - MeshCoord.to_AuxCoord() # # - don't provide public methods to return the coordinate and connectivity # managers @@ -690,14 +691,14 @@ def equal(self, other, lenient=None): # # @property # def all_coords(self): +# # return a namedtuple # # coords = mesh.all_coords # # coords.face_x, coords.edge_y # pass # # @property # def node_coords(self): -# # perhaps return a namedtuple? 
-# # this would give: +# # return a namedtuple # # node_coords = mesh.node_coords # # node_coords.x # # node_coords.y @@ -715,6 +716,7 @@ def equal(self, other, lenient=None): # # @property # def all_connectivities(self): +# # return a namedtuple # # conns = mesh.all_connectivities # # conns.edge_node, conns.boundary_node # pass @@ -786,15 +788,15 @@ def equal(self, other, lenient=None): # pass # # def add_coords(self, node_x=None, node_y=None, edge_x=None, edge_y=None, face_x=None, face_y=None): -# # this supports add a new coord to the manager, but also replacing an exiting coord +# # this supports adding a new coord to the manager, but also replacing an existing coord # self._coord_manager.add(...) # # def add_connectivities(self, *args): -# # this supports add a new connectivity to the manager, but also replacing an exiting connectivity +# # this supports adding a new connectivity to the manager, but also replacing an existing connectivity # self._connectivity_manager.add(*args) # # def remove_coords(self, ...): -# # could prove the "name", "metadata", "coord"-instance +# # could provide the "name", "metadata", "coord"-instance # # this could use mesh.coords() to find the coords # self._coord_manager.remove(...) # @@ -822,6 +824,12 @@ def equal(self, other, lenient=None): # def __unicode__(self, ...): # pass # +# def __getstate__(self): +# pass +# +# def __setstate__(self, state): +# pass +# # def xml_element(self): # pass # @@ -830,11 +838,21 @@ def equal(self, other, lenient=None): # # after using MeshCoord.guess_points(), the user may wish to add the associated MeshCoord.points into # # the Mesh as face_coordinates. # +# def to_AuxCoord(self, location, axis): +# # factory method +# # return the lazy AuxCoord(...) for the given location and axis +# +# def to_AuxCoords(self, location): +# # factory method +# # return the lazy AuxCoord(...), AuxCoord(...) +# # def to_MeshCoord(self, location, axis): +# # factory method # # return MeshCoord(..., location=location, axis=axis) # # use Connectivity.indices_by_src() for fetching indices. # # def to_MeshCoords(self, location): +# # factory method # # return MeshCoord(..., location=location, axis="x"), MeshCoord(..., location=location, axis="y") # # use Connectivity.indices_by_src() for fetching indices. # @@ -901,6 +919,12 @@ def equal(self, other, lenient=None): # for member in self._members: # yield member # +# def __getstate__(self): +# pass +# +# def __setstate__(self, state): +# pass +# # def coord(self, **kwargs): # # see Cube.coord for pattern, checking for a single result # return self.coords(**kwargs)[0] @@ -978,6 +1002,12 @@ def equal(self, other, lenient=None): # for member in self._members: # yield member # +# def __getstate__(self): +# pass +# +# def __setstate__(self, state): +# pass +# # def connectivity(self, **kwargs): # # see Cube.coord for pattern, checking for a single result # return self.connectivities(**kwargs)[0] From 04ec3b2b299495743920a2d8b036e1736be8ad75 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 15 Feb 2021 14:00:52 +0000 Subject: [PATCH 03/22] Update mesh-data-model branch (#4009) (#4011) * Add abstract cube summary (#3987) Co-authored-by: stephen.worsley * add nox session conda list (#3990) * Added text to state the Python version used to build the docs. (#3989) * Added text to state the Python version used to build the docs. * Added footer template that includes the Python version used to build. 
* added new line * Review actions * added whatsnew * Iris py38 (#3976) * support for py38 * update CI and noxfile * enforce alphabetical xml element attribute order * full tests for py38 + fix docs-tests * add whatsnew entry * update doc-strings + review actions * Alternate xml handling routine (#29) * all xml tests pass for nox tests-3.8 * restored docstrings * move sort_xml_attrs * make sort_xml_attrs a classmethod * update sort_xml_attr doc-string Co-authored-by: Bill Little * add jamesp to whatsnew + minor tweak Co-authored-by: James Penn * normalise version to implicit development release number (#3991) * Gallery: update COP maps example (#3934) * update cop maps example * comment tweaks * minor comment tweak + whatsnew * reinstate whatsnew addition * remove duplicate whatsnew * don't support mpl v1.2 (#3941) * Cubesummary tidy (#3988) * Extra tests; fix for array attributes. * Docstring for CubeSummary, and remove some unused parts. * Fix section name capitalisation, in line with existing cube summary. * Handle array differences; quote strings in extras and if 'awkward'-printing. * Ensure scalar string coord 'content' prints on one line. * update intersphinx mapping and matplotlib urls (#4003) * update intersphinx mapping and matplotlib urls * use matplotlib intersphinx where possible * review actions * review actions * update readme badges (#4004) * update readme badges * pimp twitter badge * update readme logo img src and href (#4006) * update setuptools description (#4008) Co-authored-by: Patrick Peglar Co-authored-by: stephen.worsley Co-authored-by: tkknight <2108488+tkknight@users.noreply.github.com> Co-authored-by: James Penn Co-authored-by: Ruth Comer Co-authored-by: Patrick Peglar Co-authored-by: stephen.worsley Co-authored-by: tkknight <2108488+tkknight@users.noreply.github.com> Co-authored-by: James Penn Co-authored-by: Ruth Comer --- README.md | 24 +-- .../general/plot_anomaly_log_colouring.py | 13 +- .../gallery_code/meteorology/plot_COP_maps.py | 134 +++++++--------- .../meteorology/plot_deriving_phenomena.py | 9 +- docs/src/common_links.inc | 2 +- docs/src/conf.py | 12 +- docs/src/whatsnew/3.0.1.rst | 21 ++- docs/src/whatsnew/3.0.rst | 21 ++- docs/src/whatsnew/latest.rst | 12 +- lib/iris/_representation.py | 72 +++++++-- .../representation/test_representation.py | 149 ++++++++++++++++-- setup.py | 2 +- 12 files changed, 307 insertions(+), 164 deletions(-) diff --git a/README.md b/README.md index 0ceac7e089..e460f4a01a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@

[README.md badge table: the HTML badge markup was not preserved in this extract. The recoverable changes are: the Iris logo image/link is updated, new conda-forge and pypi badges are added, the "Latest version" badge is renamed "latest release", and the twitter badge now points at scitools_iris.]
diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index 778f92db1b..846816aff7 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -12,18 +12,15 @@ "zero band" which is plotted in white. To do this, we create a custom value mapping function (normalization) using -the matplotlib Norm class `matplotlib.colours.SymLogNorm -`_. -We use this to make a cell-filled pseudocolour plot with a colorbar. +the matplotlib Norm class :obj:`matplotlib.colors.SymLogNorm`. +We use this to make a cell-filled pseudocolor plot with a colorbar. NOTE: By "pseudocolour", we mean that each data point is drawn as a "cell" region on the plot, coloured according to its data value. This is provided in Iris by the functions :meth:`iris.plot.pcolor` and :meth:`iris.plot.pcolormesh`, which call the underlying matplotlib -functions of the same names (i.e. `matplotlib.pyplot.pcolor -`_ -and `matplotlib.pyplot.pcolormesh -`_). +functions of the same names (i.e., :obj:`matplotlib.pyplot.pcolor` +and :obj:`matplotlib.pyplot.pcolormesh`). See also: http://en.wikipedia.org/wiki/False_color#Pseudocolor. """ @@ -65,7 +62,7 @@ def main(): # Use a standard colour map which varies blue-white-red. # For suitable options, see the 'Diverging colormaps' section in: - # http://matplotlib.org/examples/color/colormaps_reference.html + # http://matplotlib.org/stable/gallery/color/colormap_reference.html anom_cmap = "bwr" # Create a 'logarithmic' data normalization. diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 5555a0b85c..5e158346a9 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -38,34 +38,32 @@ def cop_metadata_callback(cube, field, filename): filename. """ - # Extract the experiment name (such as a1b or e1) from the filename (in - # this case it is just the parent folder's name) - containing_folder = os.path.dirname(filename) - experiment_label = os.path.basename(containing_folder) + # Extract the experiment name (such as A1B or E1) from the filename (in + # this case it is just the start of the file name, before the first "."). + fname = os.path.basename(filename) # filename without path. + experiment_label = fname.split(".")[0] - # Create a coordinate with the experiment label in it + # Create a coordinate with the experiment label in it... exp_coord = coords.AuxCoord( experiment_label, long_name="Experiment", units="no_unit" ) - # and add it to the cube + # ...and add it to the cube. cube.add_aux_coord(exp_coord) def main(): - # Load e1 and a1 using the callback to update the metadata - e1 = iris.load_cube( - iris.sample_data_path("E1.2098.pp"), callback=cop_metadata_callback - ) - a1b = iris.load_cube( - iris.sample_data_path("A1B.2098.pp"), callback=cop_metadata_callback - ) + # Load E1 and A1B scenarios using the callback to update the metadata. + scenario_files = [ + iris.sample_data_path(fname) for fname in ["E1.2098.pp", "A1B.2098.pp"] + ] + scenarios = iris.load(scenario_files, callback=cop_metadata_callback) - # Load the global average data and add an 'Experiment' coord it - global_avg = iris.load_cube(iris.sample_data_path("pre-industrial.pp")) + # Load the preindustrial reference data. + preindustrial = iris.load_cube(iris.sample_data_path("pre-industrial.pp")) # Define evenly spaced contour levels: -2.5, -1.5, ... 
15.5, 16.5 with the - # specific colours + # specific colours. levels = np.arange(20) - 2.5 red = ( np.array( @@ -147,81 +145,67 @@ def main(): ) # Put those colours into an array which can be passed to contourf as the - # specific colours for each level - colors = np.array([red, green, blue]).T + # specific colours for each level. + colors = np.stack([red, green, blue], axis=1) - # Subtract the global + # Make a wider than normal figure to house two maps side-by-side. + fig, ax_array = plt.subplots(1, 2, figsize=(12, 5)) - # Iterate over each latitude longitude slice for both e1 and a1b scenarios - # simultaneously - for e1_slice, a1b_slice in zip( - e1.slices(["latitude", "longitude"]), - a1b.slices(["latitude", "longitude"]), + # Loop over our scenarios to make a plot for each. + for ax, experiment, label in zip( + ax_array, ["E1", "A1B"], ["E1", "A1B-Image"] ): - - time_coord = a1b_slice.coord("time") - - # Calculate the difference from the mean - delta_e1 = e1_slice - global_avg - delta_a1b = a1b_slice - global_avg - - # Make a wider than normal figure to house two maps side-by-side - fig = plt.figure(figsize=(12, 5)) - - # Get the time datetime from the coordinate - time = time_coord.units.num2date(time_coord.points[0]) - # Set a title for the entire figure, giving the time in a nice format - # of "MonthName Year". Also, set the y value for the title so that it - # is not tight to the top of the plot. - fig.suptitle( - "Annual Temperature Predictions for " + time.strftime("%Y"), - y=0.9, - fontsize=18, + exp_cube = scenarios.extract_cube( + iris.Constraint(Experiment=experiment) ) + time_coord = exp_cube.coord("time") - # Add the first subplot showing the E1 scenario - plt.subplot(121) - plt.title("HadGEM2 E1 Scenario", fontsize=10) - iplt.contourf(delta_e1, levels, colors=colors, extend="both") - plt.gca().coastlines() - # get the current axes' subplot for use later on - plt1_ax = plt.gca() + # Calculate the difference from the preindustial control run. + exp_anom_cube = exp_cube - preindustrial - # Add the second subplot showing the A1B scenario - plt.subplot(122) - plt.title("HadGEM2 A1B-Image Scenario", fontsize=10) + # Plot this anomaly. + plt.sca(ax) + ax.set_title(f"HadGEM2 {label} Scenario", fontsize=10) contour_result = iplt.contourf( - delta_a1b, levels, colors=colors, extend="both" + exp_anom_cube, levels, colors=colors, extend="both" ) plt.gca().coastlines() - # get the current axes' subplot for use later on - plt2_ax = plt.gca() - # Now add a colourbar who's leftmost point is the same as the leftmost - # point of the left hand plot and rightmost point is the rightmost - # point of the right hand plot + # Now add a colourbar who's leftmost point is the same as the leftmost + # point of the left hand plot and rightmost point is the rightmost + # point of the right hand plot. - # Get the positions of the 2nd plot and the left position of the 1st - # plot - left, bottom, width, height = plt2_ax.get_position().bounds - first_plot_left = plt1_ax.get_position().bounds[0] + # Get the positions of the 2nd plot and the left position of the 1st plot. + left, bottom, width, height = ax_array[1].get_position().bounds + first_plot_left = ax_array[0].get_position().bounds[0] - # the width of the colorbar should now be simple - width = left - first_plot_left + width + # The width of the colorbar should now be simple. 
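    # (Editorial note, not part of the patch.)  A quick worked example of the
    # arithmetic on the next line, using hypothetical axes-fraction values:
    # with first_plot_left = 0.125 and the right-hand axes at left = 0.55 and
    # width = 0.35, the result is 0.55 - 0.125 + 0.35 = 0.775, i.e. the span
    # from the left edge of the first map to the right edge of the second,
    # which is the width the shared horizontal colorbar should occupy.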
+ width = left - first_plot_left + width - # Add axes to the figure, to place the colour bar - colorbar_axes = fig.add_axes([first_plot_left, 0.18, width, 0.03]) + # Add axes to the figure, to place the colour bar. + colorbar_axes = fig.add_axes([first_plot_left, 0.18, width, 0.03]) - # Add the colour bar - cbar = plt.colorbar( - contour_result, colorbar_axes, orientation="horizontal" - ) + # Add the colour bar. + cbar = plt.colorbar( + contour_result, colorbar_axes, orientation="horizontal" + ) - # Label the colour bar and add ticks - cbar.set_label(e1_slice.units) - cbar.ax.tick_params(length=0) + # Label the colour bar and add ticks. + cbar.set_label(preindustrial.units) + cbar.ax.tick_params(length=0) + + # Get the time datetime from the coordinate. + time = time_coord.units.num2date(time_coord.points[0]) + # Set a title for the entire figure, using the year from the datetime + # object. Also, set the y value for the title so that it is not tight to + # the top of the plot. + fig.suptitle( + f"Annual Temperature Predictions for {time.year}", + y=0.9, + fontsize=18, + ) - iplt.show() + iplt.show() if __name__ == "__main__": diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index 0bb1fa53a4..b600941f35 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -26,14 +26,7 @@ def limit_colorbar_ticks(contour_object): number of ticks on the colorbar to 4. """ - # Under Matplotlib v1.2.x the colorbar attribute of a contour object is - # a tuple containing the colorbar and an axes object, whereas under - # Matplotlib v1.3.x it is simply the colorbar. - try: - colorbar = contour_object.colorbar[0] - except (AttributeError, TypeError): - colorbar = contour_object.colorbar - + colorbar = contour_object.colorbar colorbar.locator = matplotlib.ticker.MaxNLocator(4) colorbar.update_ticks() diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index 157444d65d..3c465b67dc 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -18,7 +18,7 @@ .. _issue: https://github.com/SciTools/iris/issues .. _issues: https://github.com/SciTools/iris/issues .. _legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/ -.. _matplotlib: https://matplotlib.org/ +.. _matplotlib: https://matplotlib.org/stable/ .. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html .. _nox: https://nox.thea.codes/en/stable/ .. 
_New Issue: https://github.com/scitools/iris/issues/new/choose diff --git a/docs/src/conf.py b/docs/src/conf.py index 843af17944..9bab5850b8 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -184,18 +184,18 @@ def autolog(message): # -- intersphinx extension ---------------------------------------------------- # See https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html intersphinx_mapping = { - "cartopy": ("http://scitools.org.uk/cartopy/docs/latest/", None), - "matplotlib": ("http://matplotlib.org/", None), - "numpy": ("http://docs.scipy.org/doc/numpy/", None), - "python": ("http://docs.python.org/2.7", None), - "scipy": ("http://docs.scipy.org/doc/scipy/reference/", None), + "cartopy": ("https://scitools.org.uk/cartopy/docs/latest/", None), + "matplotlib": ("https://matplotlib.org/stable/", None), + "numpy": ("https://numpy.org/doc/stable/", None), + "python": ("https://docs.python.org/3/", None), + "scipy": ("https://docs.scipy.org/doc/scipy/reference/", None), } # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # -- plot_directive extension ------------------------------------------------- -# See https://matplotlib.org/3.1.3/devel/plot_directive.html#options +# See https://matplotlib.org/stable/api/sphinxext_plot_directive_api.html#options plot_formats = [ ("png", 100), ] diff --git a/docs/src/whatsnew/3.0.1.rst b/docs/src/whatsnew/3.0.1.rst index 163fe4ff3e..05bf41ce18 100644 --- a/docs/src/whatsnew/3.0.1.rst +++ b/docs/src/whatsnew/3.0.1.rst @@ -167,12 +167,12 @@ This document explains the changes made to Iris for this release ``volume`` are the only accepted values. (:pull:`3533`) #. `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use - `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot + :obj:`matplotlib.dates.date2num` to format date/time coordinates for use on a plot axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh` did not include this behaviour). (:pull:`3762`) #. `@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to - now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_ + now **always** be based on the ``epoch`` used in :obj:`matplotlib.dates.date2num` (previously would take the unit from a time coordinate, if present, even though the coordinate's value had been changed via ``date2num``). (:pull:`3762`) @@ -189,7 +189,7 @@ This document explains the changes made to Iris for this release #. `@stephenworsley`_ changed the way tick labels are assigned from string coords. Previously, the first tick label would occasionally be duplicated. This also - removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`) + removes the use of the deprecated `matplotlib`_ ``IndexFormatter``. (:pull:`3857`) #. `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`) @@ -295,11 +295,11 @@ This document explains the changes made to Iris for this release #. `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version - of `Matplotlib`_. (:pull:`3762`) + of `matplotlib`_. (:pull:`3762`) -#. `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_. +#. `@bjlittle`_ unpinned Iris to use the latest version of `matplotlib`_. 
Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in - pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer + pinning our dependency on `matplotlib`_ at ``v2.x``. But this is no longer necessary now that ``Python2`` support has been dropped. (:pull:`3468`) #. `@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version @@ -422,11 +422,11 @@ This document explains the changes made to Iris for this release grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_) #. `@trexfeathers`_ added additional acceptable graphics test targets to account - for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and + for very minor changes in `matplotlib`_ version ``3.3`` (colormaps, fonts and axes borders). (:pull:`3762`) -#. `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore - `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``. +#. `@rcomer`_ corrected the `matplotlib`_ backend in Iris tests to ignore + :obj:`matplotlib.rcdefaults`, instead the tests will **always** use ``agg``. (:pull:`3846`) #. `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``. @@ -470,7 +470,6 @@ This document explains the changes made to Iris for this release with `flake8`_ and `black`_. (:pull:`3928`) .. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ -.. _Matplotlib: https://matplotlib.org/ .. _CF units rules: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#units .. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data .. _Quality Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags @@ -480,7 +479,6 @@ This document explains the changes made to Iris for this release .. _Cartopy#1105: https://github.com/SciTools/cartopy/pull/1105 .. _Cartopy#1117: https://github.com/SciTools/cartopy/pull/1117 .. _Dask: https://github.com/dask/dask -.. _matplotlib.dates.date2num: https://matplotlib.org/api/dates_api.html#matplotlib.dates.date2num .. _Proj: https://github.com/OSGeo/PROJ .. _black: https://black.readthedocs.io/en/stable/ .. _Proj#1292: https://github.com/OSGeo/PROJ/pull/1292 @@ -510,7 +508,6 @@ This document explains the changes made to Iris for this release .. _numpy: https://github.com/numpy/numpy .. _xxHash: https://github.com/Cyan4973/xxHash .. _PyKE: https://pypi.org/project/scitools-pyke/ -.. _matplotlib.rcdefaults: https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults .. _@owena11: https://github.com/owena11 .. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _readthedocs: https://readthedocs.org/ diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 0f61d62033..7fdc2e3400 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -150,12 +150,12 @@ This document explains the changes made to Iris for this release ``volume`` are the only accepted values. (:pull:`3533`) #. `@trexfeathers`_ set **all** plot types in :mod:`iris.plot` to now use - `matplotlib.dates.date2num`_ to format date/time coordinates for use on a plot + :obj:`matplotlib.dates.date2num` to format date/time coordinates for use on a plot axis (previously :meth:`~iris.plot.pcolor` and :meth:`~iris.plot.pcolormesh` did not include this behaviour). (:pull:`3762`) #. 
`@trexfeathers`_ changed date/time axis labels in :mod:`iris.quickplot` to - now **always** be based on the ``epoch`` used in `matplotlib.dates.date2num`_ + now **always** be based on the ``epoch`` used in :obj:`matplotlib.dates.date2num` (previously would take the unit from a time coordinate, if present, even though the coordinate's value had been changed via ``date2num``). (:pull:`3762`) @@ -172,7 +172,7 @@ This document explains the changes made to Iris for this release #. `@stephenworsley`_ changed the way tick labels are assigned from string coords. Previously, the first tick label would occasionally be duplicated. This also - removes the use of Matplotlib's deprecated ``IndexFormatter``. (:pull:`3857`) + removes the use of the deprecated `matplotlib`_ ``IndexFormatter``. (:pull:`3857`) #. `@znicholls`_ fixed :meth:`~iris.quickplot._title` to only check ``units.is_time_reference`` if the ``units`` symbol is not used. (:pull:`3902`) @@ -278,11 +278,11 @@ This document explains the changes made to Iris for this release #. `@stephenworsley`_ and `@trexfeathers`_ pinned Iris to require `Cartopy`_ ``>=0.18``, in order to remain compatible with the latest version - of `Matplotlib`_. (:pull:`3762`) + of `matplotlib`_. (:pull:`3762`) -#. `@bjlittle`_ unpinned Iris to use the latest version of `Matplotlib`_. +#. `@bjlittle`_ unpinned Iris to use the latest version of `matplotlib`_. Supporting ``Iris`` for both ``Python2`` and ``Python3`` had resulted in - pinning our dependency on `Matplotlib`_ at ``v2.x``. But this is no longer + pinning our dependency on `matplotlib`_ at ``v2.x``. But this is no longer necessary now that ``Python2`` support has been dropped. (:pull:`3468`) #. `@stephenworsley`_ and `@trexfeathers`_ unpinned Iris to use the latest version @@ -405,11 +405,11 @@ This document explains the changes made to Iris for this release grid-line spacing in `Cartopy`_. (:pull:`3762`) (see also `Cartopy#1117`_) #. `@trexfeathers`_ added additional acceptable graphics test targets to account - for very minor changes in `Matplotlib`_ version ``3.3`` (colormaps, fonts and + for very minor changes in `matplotlib`_ version ``3.3`` (colormaps, fonts and axes borders). (:pull:`3762`) -#. `@rcomer`_ corrected the Matplotlib backend in Iris tests to ignore - `matplotlib.rcdefaults`_, instead the tests will **always** use ``agg``. +#. `@rcomer`_ corrected the `matplotlib`_ backend in Iris tests to ignore + :obj:`matplotlib.rcdefaults`, instead the tests will **always** use ``agg``. (:pull:`3846`) #. `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``. @@ -453,7 +453,6 @@ This document explains the changes made to Iris for this release with `flake8`_ and `black`_. (:pull:`3928`) .. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ -.. _Matplotlib: https://matplotlib.org/ .. _CF units rules: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#units .. _CF Ancillary Data: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#ancillary-data .. _Quality Flags: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#flags @@ -463,7 +462,6 @@ This document explains the changes made to Iris for this release .. _Cartopy#1105: https://github.com/SciTools/cartopy/pull/1105 .. _Cartopy#1117: https://github.com/SciTools/cartopy/pull/1117 .. _Dask: https://github.com/dask/dask -.. _matplotlib.dates.date2num: https://matplotlib.org/api/dates_api.html#matplotlib.dates.date2num .. 
_Proj: https://github.com/OSGeo/PROJ .. _black: https://black.readthedocs.io/en/stable/ .. _Proj#1292: https://github.com/OSGeo/PROJ/pull/1292 @@ -493,7 +491,6 @@ This document explains the changes made to Iris for this release .. _numpy: https://github.com/numpy/numpy .. _xxHash: https://github.com/Cyan4973/xxHash .. _PyKE: https://pypi.org/project/scitools-pyke/ -.. _matplotlib.rcdefaults: https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults .. _@owena11: https://github.com/owena11 .. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _readthedocs: https://readthedocs.org/ diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index c02b61341b..68872beb64 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -69,8 +69,8 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ -#. `@rcomer`_ updated the "Seasonal ensemble model plots" Gallery example. - (:pull:`3933`) +#. `@rcomer`_ updated the "Seasonal ensemble model plots" and "Global average + annual temperature maps" Gallery examples. (:pull:`3933` and :pull:`3934`) #. `@MHBalsmeier`_ described non-conda installation on Debian-based distros. (:pull:`3958`) @@ -86,6 +86,11 @@ This document explains the changes made to Iris for this release on :ref:`installing_iris` and to the footer of all pages. Also added the copyright years to the footer. (:pull:`3989`) +#. `@bjlittle`_ updated the ``intersphinx_mapping`` and fixed documentation + to use ``stable`` URLs for `matplotlib`_. (:pull:`4003`) + +#. `@bjlittle`_ added the |PyPI|_ badge to the `README.md`_. (:pull:`4004`) + 💼 Internal =========== @@ -120,4 +125,7 @@ This document explains the changes made to Iris for this release .. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _Met Office: https://www.metoffice.gov.uk/ .. _numpy: https://numpy.org/doc/stable/release/1.20.0-notes.html +.. |PyPI| image:: https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi%7Cscitools-iris +.. _PyPI: https://pypi.org/project/scitools-iris/ .. _Python 3.8: https://www.python.org/downloads/release/python-380/ +.. _README.md: https://github.com/SciTools/iris#----- diff --git a/lib/iris/_representation.py b/lib/iris/_representation.py index 301f4a9a22..ee1e1a0d55 100644 --- a/lib/iris/_representation.py +++ b/lib/iris/_representation.py @@ -6,8 +6,10 @@ """ Provides objects describing cube summaries. """ +import re import iris.util +from iris.common.metadata import _hexdigest as quickhash class DimensionHeader: @@ -46,6 +48,35 @@ def __init__(self, cube, name_padding=35): self.dimension_header = DimensionHeader(cube) +def string_repr(text, quote_strings=False): + """Produce a one-line printable form of a text string.""" + if re.findall("[\n\t]", text) or quote_strings: + # Replace the string with its repr (including quotes). + text = repr(text) + return text + + +def array_repr(arr): + """Produce a single-line printable repr of an array.""" + # First take whatever numpy produces.. + text = repr(arr) + # ..then reduce any multiple spaces and newlines. + text = re.sub("[ \t\n]+", " ", text) + return text + + +def value_repr(value, quote_strings=False): + """ + Produce a single-line printable version of an attribute or scalar value. 
+ """ + if hasattr(value, "dtype"): + value = array_repr(value) + elif isinstance(value, str): + value = string_repr(value, quote_strings=quote_strings) + value = str(value) + return value + + class CoordSummary: def _summary_coord_extra(self, cube, coord): # Returns the text needed to ensure this coordinate can be @@ -66,12 +97,21 @@ def _summary_coord_extra(self, cube, coord): vary.add(key) break value = similar_coord.attributes[key] - if attributes.setdefault(key, value) != value: + # Like "if attributes.setdefault(key, value) != value:" + # ..except setdefault fails if values are numpy arrays. + if key not in attributes: + attributes[key] = value + elif quickhash(attributes[key]) != quickhash(value): + # NOTE: fast and array-safe comparison, as used in + # :mod:`iris.common.metadata`. vary.add(key) break keys = sorted(vary & set(coord.attributes.keys())) bits = [ - "{}={!r}".format(key, coord.attributes[key]) for key in keys + "{}={}".format( + key, value_repr(coord.attributes[key], quote_strings=True) + ) + for key in keys ] if bits: extra = ", ".join(bits) @@ -105,13 +145,17 @@ def __init__(self, cube, coord): coord_cell = coord.cell(0) if isinstance(coord_cell.point, str): self.string_type = True + # 'lines' is value split on '\n', and _each one_ length-clipped. self.lines = [ iris.util.clip_string(str(item)) for item in coord_cell.point.split("\n") ] self.point = None self.bound = None - self.content = "\n".join(self.lines) + # 'content' contains a one-line printable version of the string, + content = string_repr(coord_cell.point) + content = iris.util.clip_string(content) + self.content = content else: self.string_type = False self.lines = None @@ -132,9 +176,6 @@ def __init__(self, cube, coord): class Section: - def _init_(self): - self.contents = [] - def is_empty(self): return self.contents == [] @@ -166,7 +207,8 @@ def __init__(self, title, attributes): self.values = [] self.contents = [] for name, value in sorted(attributes.items()): - value = iris.util.clip_string(str(value)) + value = value_repr(value) + value = iris.util.clip_string(value) self.names.append(name) self.values.append(value) content = "{}: {}".format(name, value) @@ -180,11 +222,13 @@ def __init__(self, title, cell_methods): class CubeSummary: + """ + This class provides a structure for output representations of an Iris cube. + TODO: use to produce the printout of :meth:`iris.cube.Cube.__str__`. 
+ + """ + def __init__(self, cube, shorten=False, name_padding=35): - self.section_indent = 5 - self.item_indent = 10 - self.extra_indent = 13 - self.shorten = shorten self.header = FullHeader(cube, name_padding) # Cache the derived coords so we can rely on consistent @@ -249,9 +293,9 @@ def add_vector_section(title, contents, iscoord=True): add_vector_section("Dimension coordinates:", vector_dim_coords) add_vector_section("Auxiliary coordinates:", vector_aux_coords) add_vector_section("Derived coordinates:", vector_derived_coords) - add_vector_section("Cell Measures:", vector_cell_measures, False) + add_vector_section("Cell measures:", vector_cell_measures, False) add_vector_section( - "Ancillary Variables:", vector_ancillary_variables, False + "Ancillary variables:", vector_ancillary_variables, False ) self.scalar_sections = {} @@ -260,7 +304,7 @@ def add_scalar_section(section_class, title, *args): self.scalar_sections[title] = section_class(title, *args) add_scalar_section( - ScalarSection, "Scalar Coordinates:", cube, scalar_coords + ScalarSection, "Scalar coordinates:", cube, scalar_coords ) add_scalar_section( ScalarCellMeasureSection, diff --git a/lib/iris/tests/unit/representation/test_representation.py b/lib/iris/tests/unit/representation/test_representation.py index 212f454e70..69d2a71a97 100644 --- a/lib/iris/tests/unit/representation/test_representation.py +++ b/lib/iris/tests/unit/representation/test_representation.py @@ -54,8 +54,8 @@ def test_blank_cube(self): "Dimension coordinates:", "Auxiliary coordinates:", "Derived coordinates:", - "Cell Measures:", - "Ancillary Variables:", + "Cell measures:", + "Ancillary variables:", ] self.assertEqual( list(rep.vector_sections.keys()), expected_vector_sections @@ -66,7 +66,7 @@ def test_blank_cube(self): self.assertTrue(vector_section.is_empty()) expected_scalar_sections = [ - "Scalar Coordinates:", + "Scalar coordinates:", "Scalar cell measures:", "Attributes:", "Cell methods:", @@ -103,21 +103,28 @@ def test_scalar_coord(self): scalar_coord_with_bounds = AuxCoord( [10], long_name="foo", units="K", bounds=[(5, 15)] ) - scalar_coord_text = AuxCoord( - ["a\nb\nc"], long_name="foo", attributes={"key": "value"} + scalar_coord_simple_text = AuxCoord( + ["this and that"], + long_name="foo", + attributes={"key": 42, "key2": "value-str"}, + ) + scalar_coord_awkward_text = AuxCoord( + ["a is\nb\n and c"], long_name="foo_2" ) cube.add_aux_coord(scalar_coord_no_bounds) cube.add_aux_coord(scalar_coord_with_bounds) - cube.add_aux_coord(scalar_coord_text) + cube.add_aux_coord(scalar_coord_simple_text) + cube.add_aux_coord(scalar_coord_awkward_text) rep = iris._representation.CubeSummary(cube) - scalar_section = rep.scalar_sections["Scalar Coordinates:"] + scalar_section = rep.scalar_sections["Scalar coordinates:"] - self.assertEqual(len(scalar_section.contents), 3) + self.assertEqual(len(scalar_section.contents), 4) no_bounds_summary = scalar_section.contents[0] bounds_summary = scalar_section.contents[1] - text_summary = scalar_section.contents[2] + text_summary_simple = scalar_section.contents[2] + text_summary_awkward = scalar_section.contents[3] self.assertEqual(no_bounds_summary.name, "bar") self.assertEqual(no_bounds_summary.content, "10 K") @@ -127,9 +134,15 @@ def test_scalar_coord(self): self.assertEqual(bounds_summary.content, "10 K, bound=(5, 15) K") self.assertEqual(bounds_summary.extra, "") - self.assertEqual(text_summary.name, "foo") - self.assertEqual(text_summary.content, "a\nb\nc") - self.assertEqual(text_summary.extra, 
"key='value'") + self.assertEqual(text_summary_simple.name, "foo") + self.assertEqual(text_summary_simple.content, "this and that") + self.assertEqual(text_summary_simple.lines, ["this and that"]) + self.assertEqual(text_summary_simple.extra, "key=42, key2='value-str'") + + self.assertEqual(text_summary_awkward.name, "foo_2") + self.assertEqual(text_summary_awkward.content, r"'a is\nb\n and c'") + self.assertEqual(text_summary_awkward.lines, ["a is", "b", " and c"]) + self.assertEqual(text_summary_awkward.extra, "") def test_cell_measure(self): cube = self.cube @@ -137,7 +150,7 @@ def test_cell_measure(self): cube.add_cell_measure(cell_measure, 0) rep = iris._representation.CubeSummary(cube) - cm_section = rep.vector_sections["Cell Measures:"] + cm_section = rep.vector_sections["Cell measures:"] self.assertEqual(len(cm_section.contents), 1) cm_summary = cm_section.contents[0] @@ -150,7 +163,7 @@ def test_ancillary_variable(self): cube.add_ancillary_variable(cell_measure, 0) rep = iris._representation.CubeSummary(cube) - av_section = rep.vector_sections["Ancillary Variables:"] + av_section = rep.vector_sections["Ancillary variables:"] self.assertEqual(len(av_section.contents), 1) av_summary = av_section.contents[0] @@ -159,12 +172,14 @@ def test_ancillary_variable(self): def test_attributes(self): cube = self.cube - cube.attributes = {"a": 1, "b": "two"} + cube.attributes = {"a": 1, "b": "two", "c": " this \n that\tand."} rep = iris._representation.CubeSummary(cube) attribute_section = rep.scalar_sections["Attributes:"] attribute_contents = attribute_section.contents - expected_contents = ["a: 1", "b: two"] + expected_contents = ["a: 1", "b: two", "c: ' this \\n that\\tand.'"] + # Note: a string with \n or \t in it gets "repr-d". + # Other strings don't (though in coord 'extra' lines, they do.) self.assertEqual(attribute_contents, expected_contents) @@ -182,6 +197,108 @@ def test_cell_methods(self): expected_contents = ["mean: x, y", "mean: x"] self.assertEqual(cell_method_section.contents, expected_contents) + def test_scalar_cube(self): + cube = self.cube + while cube.ndim > 0: + cube = cube[0] + rep = iris._representation.CubeSummary(cube) + self.assertEqual(rep.header.nameunit, "air_temperature / (K)") + self.assertTrue(rep.header.dimension_header.scalar) + self.assertEqual(rep.header.dimension_header.dim_names, []) + self.assertEqual(rep.header.dimension_header.shape, []) + self.assertEqual(rep.header.dimension_header.contents, ["scalar cube"]) + self.assertEqual(len(rep.vector_sections), 5) + self.assertTrue( + all(sect.is_empty() for sect in rep.vector_sections.values()) + ) + self.assertEqual(len(rep.scalar_sections), 4) + self.assertEqual( + len(rep.scalar_sections["Scalar coordinates:"].contents), 1 + ) + self.assertTrue( + rep.scalar_sections["Scalar cell measures:"].is_empty() + ) + self.assertTrue(rep.scalar_sections["Attributes:"].is_empty()) + self.assertTrue(rep.scalar_sections["Cell methods:"].is_empty()) + + def test_coord_attributes(self): + cube = self.cube + co1 = cube.coord("latitude") + co1.attributes.update(dict(a=1, b=2)) + co2 = co1.copy() + co2.attributes.update(dict(a=7, z=77, text="ok", text2="multi\nline")) + cube.add_aux_coord(co2, cube.coord_dims(co1)) + rep = iris._representation.CubeSummary(cube) + co1_summ = rep.vector_sections["Dimension coordinates:"].contents[0] + co2_summ = rep.vector_sections["Auxiliary coordinates:"].contents[0] + # Notes: 'b' is same so does not appear; sorted order; quoted strings. 
+ self.assertEqual(co1_summ.extra, "a=1") + self.assertEqual( + co2_summ.extra, "a=7, text='ok', text2='multi\\nline', z=77" + ) + + def test_array_attributes(self): + cube = self.cube + co1 = cube.coord("latitude") + co1.attributes.update(dict(a=1, array=np.array([1.2, 3]))) + co2 = co1.copy() + co2.attributes.update(dict(b=2, array=np.array([3.2, 1]))) + cube.add_aux_coord(co2, cube.coord_dims(co1)) + rep = iris._representation.CubeSummary(cube) + co1_summ = rep.vector_sections["Dimension coordinates:"].contents[0] + co2_summ = rep.vector_sections["Auxiliary coordinates:"].contents[0] + self.assertEqual(co1_summ.extra, "array=array([1.2, 3. ])") + self.assertEqual(co2_summ.extra, "array=array([3.2, 1. ]), b=2") + + def test_attributes_subtle_differences(self): + cube = Cube([0]) + + # Add a pair that differ only in having a list instead of an array. + co1a = DimCoord( + [0], + long_name="co1_list_or_array", + attributes=dict(x=1, arr1=np.array(2), arr2=np.array([1, 2])), + ) + co1b = co1a.copy() + co1b.attributes.update(dict(arr2=[1, 2])) + for co in (co1a, co1b): + cube.add_aux_coord(co) + + # Add a pair that differ only in an attribute array dtype. + co2a = AuxCoord( + [0], + long_name="co2_dtype", + attributes=dict(x=1, arr1=np.array(2), arr2=np.array([3, 4])), + ) + co2b = co2a.copy() + co2b.attributes.update(dict(arr2=np.array([3.0, 4.0]))) + assert co2b != co2a + for co in (co2a, co2b): + cube.add_aux_coord(co) + + # Add a pair that differ only in an attribute array shape. + co3a = DimCoord( + [0], + long_name="co3_shape", + attributes=dict(x=1, arr1=np.array([5, 6]), arr2=np.array([3, 4])), + ) + co3b = co3a.copy() + co3b.attributes.update(dict(arr1=np.array([[5], [6]]))) + for co in (co3a, co3b): + cube.add_aux_coord(co) + + rep = iris._representation.CubeSummary(cube) + co_summs = rep.scalar_sections["Scalar coordinates:"].contents + co1a_summ, co1b_summ = co_summs[0:2] + self.assertEqual(co1a_summ.extra, "arr2=array([1, 2])") + self.assertEqual(co1b_summ.extra, "arr2=[1, 2]") + co2a_summ, co2b_summ = co_summs[2:4] + self.assertEqual(co2a_summ.extra, "arr2=array([3, 4])") + self.assertEqual(co2b_summ.extra, "arr2=array([3., 4.])") + co3a_summ, co3b_summ = co_summs[4:6] + self.assertEqual(co3a_summ.extra, "arr1=array([5, 6])") + self.assertEqual(co3b_summ.extra, "arr1=array([[5], [6]])") + if __name__ == "__main__": tests.main() diff --git a/setup.py b/setup.py index b1c8939fdd..f4bfe4cf08 100644 --- a/setup.py +++ b/setup.py @@ -263,7 +263,7 @@ def long_description(): author="UK Met Office", author_email="scitools-iris-dev@googlegroups.com", description="A powerful, format-agnostic, community-driven Python " - "library for analysing and visualising Earth science data", + "package for analysing and visualising Earth science data", long_description=long_description(), long_description_content_type="text/markdown", packages=find_package_tree("lib/iris", "iris"), From 41a72f9f73657c7dfd2254d2407691efe7112283 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 15 Feb 2021 14:57:45 +0000 Subject: [PATCH 04/22] MeshMetadata class. (#4002) * MeshMetadata class. * MeshMetadata extra members for dim names. * Comment for BaseMetadata refactoring. 
--- lib/iris/common/metadata.py | 13 +- lib/iris/experimental/ugrid.py | 134 ++- .../experimental/ugrid/test_MeshMetadata.py | 784 ++++++++++++++++++ 3 files changed, 917 insertions(+), 14 deletions(-) create mode 100644 lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 3f8d7e6bf0..c27b488b3b 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -181,9 +181,18 @@ def func(field): return result # Note that, for strict we use "_fields" not "_members". - # The "circular" and "src_dim" members do not participate in strict equivalence. + # TODO: refactor so that 'non-participants' can be held in their specific subclasses. + # Certain members never participate in strict equivalence, so + # are filtered out. fields = filter( - lambda field: field not in ("circular", "src_dim"), + lambda field: field + not in ( + "circular", + "src_dim", + "node_dimension", + "edge_dimension", + "face_dimension", + ), self._fields, ) result = all([func(field) for field in fields]) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 994671c8b9..3a9759073e 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -497,7 +497,7 @@ def xml_element(self, doc): class ConnectivityMetadata(BaseMetadata): """ - Metadata container for a :class:`~iris.coords.Connectivity`. + Metadata container for a :class:`~iris.experimental.ugrid.Connectivity`. """ @@ -615,6 +615,127 @@ def equal(self, other, lenient=None): return super().equal(other, lenient=lenient) +class MeshMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.experimental.ugrid.Mesh`. + + """ + + # The node_dimension", "edge_dimension" and "face_dimension" members are + # stateful only; they not participate in lenient/strict equivalence. + _members = ( + "topology_dimension", + "node_dimension", + "edge_dimension", + "face_dimension", + ) + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for meshes. + + Args: + + * other (MeshMetadata): + The other mesh metadata participating in the lenient + combination. + + Returns: + A list of combined metadata member values. + + """ + + # Perform "strict" combination for "topology_dimension", + # "node_dimension", "edge_dimension" and "face_dimension". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return left if left == right else None + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in MeshMetadata._members] + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.extend(values) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for meshes. + + Args: + + * other (MeshMetadata): + The other mesh metadata participating in the lenient + comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for "topology_dimension". + # "node_dimension", "edge_dimension" and "face_dimension" are not part + # of lenient equivalence at all. + result = self.topology_dimension == other.topology_dimension + if result: + # Perform lenient comparison of the other parent members. 
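            # (Editorial note, not part of the patch.)  In BaseMetadata's
            # lenient comparison a member set to None on either operand is
            # treated as compatible, so only members populated on both sides
            # need to agree.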
+ result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for meshes. + + Args: + + * other (MeshMetadata): + The other mesh metadata participating in the lenient + difference. + + Returns: + A list of difference metadata member values. + + """ + # Perform "strict" difference for "topology_dimension", + # "node_dimension", "edge_dimension" and "face_dimension". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return None if left == right else (left, right) + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in MeshMetadata._members] + # Perform lenient difference of the other parent members. + result = super()._difference_lenient(other) + result.extend(values) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + # class Mesh(CFVariableMixin): # """ # @@ -876,17 +997,6 @@ def equal(self, other, lenient=None): # """ # return self._metadata_manager.topology_dimension # -# -# class MeshMetadata(BaseMetadata): -# """ -# .. notes:: -# -# - topology_dimension is treated strictly in both -# strict and lenient modes, and does participate in __eq__ -# """ -# _members = "topology_dimension" -# -# # # # # - validate coord_systems # # - validate climatological diff --git a/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py new file mode 100644 index 0000000000..cfc668fb88 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py @@ -0,0 +1,784 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.MeshMetadata`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata +from iris.experimental.ugrid import MeshMetadata + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.topology_dimension = mock.sentinel.topology_dimension + self.node_dimension = mock.sentinel.node_dimension + self.edge_dimension = mock.sentinel.edge_dimension + self.face_dimension = mock.sentinel.face_dimension + self.cls = MeshMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + topology_dimension=self.topology_dimension, + node_dimension=self.node_dimension, + edge_dimension=self.edge_dimension, + face_dimension=self.face_dimension, + ) + fmt = ( + "MeshMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r}, " + "topology_dimension={!r}, node_dimension={!r}, " + "edge_dimension={!r}, face_dimension={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.topology_dimension, + self.node_dimension, + self.edge_dimension, + self.face_dimension, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + "topology_dimension", + "node_dimension", + "edge_dimension", + "face_dimension", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test__eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + topology_dimension=sentinel.topology_dimension, + node_dimension=sentinel.node_dimension, + edge_dimension=sentinel.edge_dimension, + face_dimension=sentinel.face_dimension, + ) + self.dummy = sentinel.dummy + self.cls = MeshMetadata + # The "node_dimension", "edge_dimension" and "face_dimension" members + # are stateful only; they do not participate in lenient/strict equivalence. 
+ self.members_dim_names = filter( + lambda member: member + in ("node_dimension", "edge_dimension", "face_dimension"), + self.cls._members, + ) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_topology_dim_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_dim_names_none(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_topology_dim(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_dim_names(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", 
return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_topology_dim(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_dim_names(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_topology_dim_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_dim_names_none(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = MeshMetadata + self.one = self.cls(1, 1, 1, 1, 1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1, 1, 1, 1, 1) + self.none = self.cls(1, 1, 1, None, 1, 1, 1, 1, 1) + self.attributes = self.cls(1, 1, 1, 1, 10, 1, 1, 1, 1) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + topology_dimension=sentinel.topology_dimension, + node_dimension=sentinel.node_dimension, + edge_dimension=sentinel.edge_dimension, + 
face_dimension=sentinel.face_dimension, + ) + self.dummy = sentinel.dummy + self.cls = MeshMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__ + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertTrue( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertTrue( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def 
test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + topology_dimension=sentinel.topology_dimension, + node_dimension=sentinel.node_dimension, + edge_dimension=sentinel.edge_dimension, + face_dimension=sentinel.face_dimension, + ) + self.dummy = sentinel.dummy + self.cls = MeshMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__ + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + 
self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + member_value = getattr(lmetadata, member) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (member_value, None) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = (None, member_value) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = 
self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = MeshMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) 
+ + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + if __name__ == "__main__": + tests.main() From 30f92208ff192c44924d86277a4042608eb018c4 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 16 Feb 2021 09:19:28 +0000 Subject: [PATCH 05/22] add meshmetadata services (#4012) --- lib/iris/experimental/ugrid.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 3a9759073e..488212ff9b 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -31,6 +31,7 @@ __all__ = [ "Connectivity", "ConnectivityMetadata", + "MeshMetadata", ] @@ -1187,16 +1188,24 @@ def equal(self, other, lenient=None): #: Convenience collection of lenient metadata combine services. -SERVICES_COMBINE.append(ConnectivityMetadata.combine) -SERVICES.append(ConnectivityMetadata.combine) +_services = [ConnectivityMetadata.combine, MeshMetadata.combine] +SERVICES_COMBINE.extend(_services) +SERVICES.extend(_services) #: Convenience collection of lenient metadata difference services. -SERVICES_DIFFERENCE.append(ConnectivityMetadata.difference) -SERVICES.append(ConnectivityMetadata.difference) +_services = [ConnectivityMetadata.difference, MeshMetadata.difference] +SERVICES_DIFFERENCE.extend(_services) +SERVICES.extend(_services) #: Convenience collection of lenient metadata equality services. -SERVICES_EQUAL.extend( - [ConnectivityMetadata.__eq__, ConnectivityMetadata.equal] -) -SERVICES.extend([ConnectivityMetadata.__eq__, ConnectivityMetadata.equal]) +_services = [ + ConnectivityMetadata.__eq__, + ConnectivityMetadata.equal, + MeshMetadata.__eq__, + MeshMetadata.equal, +] +SERVICES_EQUAL.extend(_services) +SERVICES.extend(_services) + +del _services From 682136bee1f45c67cd5d492647fa0afe73cd1209 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Fri, 19 Feb 2021 13:59:58 +0000 Subject: [PATCH 06/22] Mesh api coord manager (#4015) * add mesh coordinate manager * wip * make shape methods private + reorganise method order * review actions * partial mesh * wip --- lib/iris/experimental/ugrid.py | 1297 +++++++++++++++++++++++--------- 1 file changed, 944 insertions(+), 353 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 488212ff9b..002c40952f 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -10,6 +10,7 @@ """ +from collections import Mapping, namedtuple from functools import wraps import dask.array as da @@ -17,7 +18,10 @@ from .. import _lazy_data as _lazy from ..common.metadata import ( + _hexdigest, BaseMetadata, + CoordMetadata, + DimCoordMetadata, metadata_manager_factory, SERVICES, SERVICES_COMBINE, @@ -25,16 +29,48 @@ SERVICES_DIFFERENCE, ) from ..common.lenient import _lenient_service as lenient_service -from ..coords import _DimensionalMetadata +from ..common.mixin import CFVariableMixin +from ..config import get_logger +from ..coords import _DimensionalMetadata, AuxCoord +from ..exceptions import CoordinateNotFoundError +from ..util import guess_coord_axis __all__ = [ "Connectivity", "ConnectivityMetadata", + "Mesh1DCoords", + "Mesh2DCoords", + "MeshEdgeCoords", + "MeshFaceCoords", + "MeshNodeCoords", "MeshMetadata", ] +# Configure the logger. +logger = get_logger(__name__, fmt="[%(cls)s.%(funcName)s]") + + +# Mesh dimension names namedtuples. 
+Mesh1DNames = namedtuple("Mesh1DNames", ["node_dimension", "edge_dimension"]) +Mesh2DNames = namedtuple( + "Mesh2DNames", ["node_dimension", "edge_dimension", "face_dimension"] +) + +# Mesh coordinate manager namedtuples. +Mesh1DCoords = namedtuple( + "Mesh1DCoords", ["node_x", "node_y", "edge_x", "edge_y"] +) +Mesh2DCoords = namedtuple( + "Mesh2DCoords", + ["node_x", "node_y", "edge_x", "edge_y", "face_x", "face_y"], +) +MeshNodeCoords = namedtuple("MeshNodeCoords", ["node_x", "node_y"]) +MeshEdgeCoords = namedtuple("MeshEdgeCoords", ["edge_x", "edge_y"]) +MeshFaceCoords = namedtuple("MeshFaceCoords", ["face_x", "face_y"]) + + class Connectivity(_DimensionalMetadata): """ A CF-UGRID topology connectivity, describing the topological relationship @@ -737,358 +773,913 @@ def equal(self, other, lenient=None): return super().equal(other, lenient=lenient) -# class Mesh(CFVariableMixin): -# """ -# -# .. todo:: -# -# .. questions:: -# -# - decide on the verbose/succinct version of __str__ vs __repr__ -# -# .. notes:: -# -# - the mesh is location agnostic -# -# - no need to support volume at mesh level, yet -# -# - topology_dimension -# - use for fast equality between Mesh instances -# - checking connectivity dimensionality, specifically the highest dimensonality of the -# "geometric element" being added i.e., reference the src_location/tgt_location -# - used to honour and enforce the minimum UGRID connectivity contract -# -# - support pickling -# -# - copy is off the table!! -# -# - MeshCoord.guess_points() -# - MeshCoord.to_AuxCoord() -# -# - don't provide public methods to return the coordinate and connectivity -# managers -# -# """ -# def __init__( -# self, -# topology_dimension, -# standard_name=None, -# long_name=None, -# var_name=None, -# units=None, -# attributes=None, -# node_dimension=None, -# edge_dimension=None, -# face_dimension=None, -# node_coords_and_axes=None, # [(coord, "x"), (coord, "y")] this is a stronger contract, not relying on guessing -# edge_coords_and_axes=None, # ditto -# face_coords_and_axes=None, # ditto -# connectivities=None, # [Connectivity, [Connectivity], ...] -# ): -# # TODO: support volumes. -# # TODO: support (coord, "z") -# -# # These are strings, if None is provided then assign the default string. -# self.node_dimension = node_dimension -# self.edge_dimension = edge_dimension -# self.face_dimension = face_dimension -# -# self._metadata_manager = metadata_manager_factory(MeshMetadata) -# -# self._metadata_manager.topology_dimension = topology_dimension -# -# self.standard_name = standard_name -# self.long_name = long_name -# self.var_name = var_name -# self.units = units -# self.attributes = attributes -# -# # based on the topology_dimension create the appropriate coordinate manager -# # with some intelligence -# self._coord_manager = ... -# -# # based on the topology_dimension create the appropriate connectivity manager -# # with some intelligence -# self._connectivity_manager = ... 
-# -# @property -# def all_coords(self): -# # return a namedtuple -# # coords = mesh.all_coords -# # coords.face_x, coords.edge_y -# pass -# -# @property -# def node_coords(self): -# # return a namedtuple -# # node_coords = mesh.node_coords -# # node_coords.x -# # node_coords.y -# pass -# -# @property -# def edge_coords(self): -# # as above -# pass -# -# @property -# def face_coords(self): -# # as above -# pass -# -# @property -# def all_connectivities(self): -# # return a namedtuple -# # conns = mesh.all_connectivities -# # conns.edge_node, conns.boundary_node -# pass -# -# @property -# def face_node_connectivity(self): -# # required -# return self._connectivity_manager.face_node -# -# @property -# def edge_node_connectivity(self): -# # optionally required -# return self._connectivity_manager.edge_node -# -# @property -# def face_edge_connectivity(self): -# # optional -# return self._connectivity_manager.face_edge -# -# @property -# def face_face_connectivity(self): -# # optional -# return self._connectivity_manager.face_face -# -# @property -# def edge_face_connectivity(self): -# # optional -# return self._connectivity_manager.edge_face -# -# @property -# def boundary_node_connectivity(self): -# # optional -# return self._connectivity_manager.boundard_node -# -# def coord(self, ...): -# # as Cube.coord i.e., ensure that one and only one coord-like is returned -# # otherwise raise and exception -# pass -# -# def coords( -# self, -# name_or_coord=None, -# standard_name=None, -# long_name=None, -# var_name=None, -# attributes=None, -# axis=None, -# node=False, -# edge=False, -# face=False, -# ): -# # do we support the coord_system kwargs? -# self._coord_manager.coords(...) -# -# def connectivity(self, ...): -# pass -# -# def connectivities( -# self, -# name_or_coord=None, -# standard_name=None, -# long_name=None, -# var_name=None, -# attributes=None, -# node=False, -# edge=False, -# face=False, -# ): -# pass -# -# def add_coords(self, node_x=None, node_y=None, edge_x=None, edge_y=None, face_x=None, face_y=None): -# # this supports adding a new coord to the manager, but also replacing an existing coord -# self._coord_manager.add(...) -# -# def add_connectivities(self, *args): -# # this supports adding a new connectivity to the manager, but also replacing an existing connectivity -# self._connectivity_manager.add(*args) -# -# def remove_coords(self, ...): -# # could provide the "name", "metadata", "coord"-instance -# # this could use mesh.coords() to find the coords -# self._coord_manager.remove(...) -# -# def remove_connectivities(self, ...): -# # needs to respect the minimum UGRID contract -# self._connectivity_manager.remove(...) -# -# def __eq__(self, other): -# # Full equality could be MASSIVE, so we want to avoid that. -# # Ideally we want a mesh signature from LFRic for comparison, although this would -# # limit Iris' relevance outside MO. -# # TL;DR: unknown quantity. -# raise NotImplemented -# -# def __ne__(self, other): -# # See __eq__ -# raise NotImplemented -# -# def __str__(self): -# pass -# -# def __repr__(self): -# pass -# -# def __unicode__(self, ...): -# pass -# -# def __getstate__(self): -# pass -# -# def __setstate__(self, state): -# pass -# -# def xml_element(self): -# pass -# -# # the MeshCoord will always have bounds, perhaps points. However the MeshCoord.guess_points() may -# # be a very useful part of its behaviour. -# # after using MeshCoord.guess_points(), the user may wish to add the associated MeshCoord.points into -# # the Mesh as face_coordinates. 
-# -# def to_AuxCoord(self, location, axis): -# # factory method -# # return the lazy AuxCoord(...) for the given location and axis -# -# def to_AuxCoords(self, location): -# # factory method -# # return the lazy AuxCoord(...), AuxCoord(...) -# -# def to_MeshCoord(self, location, axis): -# # factory method -# # return MeshCoord(..., location=location, axis=axis) -# # use Connectivity.indices_by_src() for fetching indices. -# -# def to_MeshCoords(self, location): -# # factory method -# # return MeshCoord(..., location=location, axis="x"), MeshCoord(..., location=location, axis="y") -# # use Connectivity.indices_by_src() for fetching indices. -# -# def dimension_names_reset(self, node=False, face=False, edge=False): -# # reset to defaults like this (suggestion) -# -# def dimension_names(self, node=None, face=None, edge=None): -# # e.g., only set self.node iff node != None. these attributes will -# # always be set to a user provided string or the default string. -# # return a namedtuple of dict-like -# -# @property -# def cf_role(self): -# return "mesh_topology" -# -# @property -# def topology_dimension(self): -# """ -# read-only -# -# """ -# return self._metadata_manager.topology_dimension -# -# # -# # - validate coord_systems -# # - validate climatological -# # - use guess_coord_axis (iris.utils) -# # - others? -# # -# class _Mesh1DCoordinateManager: -# REQUIRED = ( -# "node_x", -# "node_y", -# ) -# OPTIONAL = ( -# "edge_x", -# "edge_y", -# ) -# def __init__(self, node_x, node_y, edge_x=None, edge_y=None): -# # required -# self.node_x = node_x -# self.node_y = node_y -# # optional -# self.edge_x = edge_x -# self.edge_y = edge_y -# -# # WOO-GA - this can easily get out of sync with the self attributes. -# # choose the container wisely e.g., could be an dict..., also the self -# # attributes may need to be @property's that access the chosen _members container -# self._members = [ ... ] -# -# def __iter__(self): -# for member in self._members: -# yield member -# -# def __getstate__(self): -# pass -# -# def __setstate__(self, state): -# pass -# -# def coord(self, **kwargs): -# # see Cube.coord for pattern, checking for a single result -# return self.coords(**kwargs)[0] -# -# def coords(self, ...): -# # see Cube.coords for relevant patterns -# # return [ ... ] -# pass -# -# def add(self, **kwargs): -# pass -# -# def remove(self, ...): -# # needs to respect the minimum UGRID contract -# # use logging/warning to flag items not removed - highlight in doc-string -# # don't raise an exception -# -# def __str__(self): -# pass -# -# def __repr__(self): -# pass -# -# def __eq__(self, other): -# # Full equality could be MASSIVE, so we want to avoid that. -# # Ideally we want a mesh signature from LFRic for comparison, although this would -# # limit Iris' relevance outside MO. -# # TL;DR: unknown quantity. -# raise NotImplemented -# -# def __ne__(self, other): -# # See __eq__ -# raise NotImplemented -# -# -# class _Mesh2DCoordinateManager(_Mesh1DCoordinateManager): -# OPTIONAL = ( -# "edge_x", -# "edge_y", -# "face_x", -# "face_y", -# ) -# def __init__(self, node_x, node_y, edge_x=None, edge_y=None, face_x=None, face_y=None): -# # optional -# self.face_x = face_x -# self.face_y = face_y -# -# super().__init__(node_x, node_y, edge_x=edge_x, edge_y=edge_y) -# -# # does the order matter? -# self._members.extend([self.face_x, self.face_y]) -# -# +class Mesh(CFVariableMixin): + """ + + .. todo:: + + .. questions:: + + - decide on the verbose/succinct version of __str__ vs __repr__ + + .. 
notes:: + + - the mesh is location agnostic + + - no need to support volume at mesh level, yet + + - topology_dimension + - use for fast equality between Mesh instances + - checking connectivity dimensionality, specifically the highest dimensonality of the + "geometric element" being added i.e., reference the src_location/tgt_location + - used to honour and enforce the minimum UGRID connectivity contract + + - support pickling + + - copy is off the table!! + + - MeshCoord.guess_points() + - MeshCoord.to_AuxCoord() + + - don't provide public methods to return the coordinate and connectivity + managers + + - validate both managers contents e.g., shape? more...? + + """ + + # TBD: for volume and/or z-axis support include axis "z" and/or dimension "3" + AXES = ("x", "y") + TOPOLOGY_DIMENSIONS = (1, 2) + + def __init__( + self, + topology_dimension, + node_coords_and_axes, + standard_name=None, + long_name=None, + var_name=None, + units=None, + attributes=None, + edge_coords_and_axes=None, + face_coords_and_axes=None, + # connectivities=None, + node_dimension=None, + edge_dimension=None, + face_dimension=None, + ): + # TODO: support volumes. + # TODO: support (coord, "z") + + self._metadata_manager = metadata_manager_factory(MeshMetadata) + + # topology_dimension is read-only, so assign directly to the metadata manager + if topology_dimension not in self.TOPOLOGY_DIMENSIONS: + emsg = f"Expected 'topology_dimension' in range {self.TOPOLOGY_DIMENSIONS!r}, got {topology_dimension!r}." + raise ValueError(emsg) + self._metadata_manager.topology_dimension = topology_dimension + + # TBD: these are strings, if None is provided then assign the default string. + self.node_dimension = node_dimension + self.edge_dimension = edge_dimension + self.face_dimension = face_dimension + + # assign the metadata to the metadata manager + self.standard_name = standard_name + self.long_name = long_name + self.var_name = var_name + self.units = units + self.attributes = attributes + + # based on the topology_dimension, create the appropriate coordinate manager + def normalise(location, axis): + result = str(axis).lower() + if result not in self.AXES: + emsg = f"Invalid axis specified for {location} coordinate {coord.name()!r}, got {axis!r}." + raise ValueError(emsg) + return f"{location}_{axis}" + + kwargs = {} + for coord, axis in node_coords_and_axes: + kwargs[normalise("node", axis)] = coord + if edge_coords_and_axes is not None: + for coord, axis in edge_coords_and_axes: + kwargs[normalise("edge", axis)] = coord + if face_coords_and_axes is not None: + for coord, axis in face_coords_and_axes: + kwargs[normalise("face", axis)] = coord + + # check the UGRID minimum requirement for coordinates + if "node_x" not in kwargs: + emsg = ( + "Require a node coordinate that is x-axis like to be provided." + ) + raise ValueError(emsg) + if "node_y" not in kwargs: + emsg = ( + "Require a node coordinate that is y-axis like to be provided." + ) + raise ValueError(emsg) + + if self.topology_dimension == 1: + self._coord_manager = _Mesh1DCoordinateManager(**kwargs) + elif self.topology_dimension == 2: + self._coord_manager = _Mesh2DCoordinateManager(**kwargs) + else: + emsg = f"Unsupported 'topology_dimension', got {topology_dimension!r}." + raise NotImplementedError(emsg) + + # based on the topology_dimension, create the appropriate connectivity manager + # self._connectivity_manager = ... 
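+
+        # For illustration only (a sketch, not exercised by this patch):
+        # given the validation above, a minimal 2D mesh is expected to be
+        # built along these lines, with edge/face coordinates supplied via
+        # the equivalent "*_coords_and_axes" keywords:
+        #
+        #     node_x = AuxCoord([0.0, 1.0, 1.0], standard_name="longitude")
+        #     node_y = AuxCoord([0.0, 0.0, 1.0], standard_name="latitude")
+        #     mesh = Mesh(
+        #         topology_dimension=2,
+        #         node_coords_and_axes=[(node_x, "x"), (node_y, "y")],
+        #     )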
+ + def __eq__(self, other): + # TBD + return NotImplemented + + def __getstate__(self): + # TBD + pass + + def __ne__(self, other): + # TBD + return NotImplemented + + def __repr__(self): + # TBD + args = [] + return f"{self.__class__.__name__}({', '.join(args)})" + + def __setstate__(self, state): + # TBD + pass + + def __str__(self): + # TBD + args = [] + return f"{self.__class__.__name__}({', '.join(args)})" + + @property + def all_coords(self): + return self._coord_manager.all_members + + @property + def edge_dimension(self): + return self._edge_dimension + + @edge_dimension.setter + def edge_dimension(self, name): + if not name or not isinstance(name, str): + self._edge_dimension = f"Mesh{self.topology_dimension}d_edge" + else: + self._edge_dimension = name + + @property + def edge_coords(self): + return self._coord_manager.edge_coords + + @property + def face_dimension(self): + return self._face_dimension + + @face_dimension.setter + def face_dimension(self, name): + if not name or not isinstance(name, str): + self._face_dimension = f"Mesh{self.topology_dimension}d_face" + else: + self._face_dimension = name + + @property + def face_coords(self): + return self._coord_manager.face_coords + + @property + def node_dimension(self): + return self._node_dimension + + @node_dimension.setter + def node_dimension(self, name): + if not name or not isinstance(name, str): + self._node_dimension = f"Mesh{self.topology_dimension}d_node" + else: + self._node_dimension = name + + @property + def node_coords(self): + return self._coord_manager.node_coords + + # @property + # def all_connectivities(self): + # # return a namedtuple + # # conns = mesh.all_connectivities + # # conns.edge_node, conns.boundary_node + # pass + # + # @property + # def face_node_connectivity(self): + # # required + # return self._connectivity_manager.face_node + # + # @property + # def edge_node_connectivity(self): + # # optionally required + # return self._connectivity_manager.edge_node + # + # @property + # def face_edge_connectivity(self): + # # optional + # return self._connectivity_manager.face_edge + # + # @property + # def face_face_connectivity(self): + # # optional + # return self._connectivity_manager.face_face + # + # @property + # def edge_face_connectivity(self): + # # optional + # return self._connectivity_manager.edge_face + # + # @property + # def boundary_node_connectivity(self): + # # optional + # return self._connectivity_manager.boundary_node + + def add_coords( + self, + node_x=None, + node_y=None, + edge_x=None, + edge_y=None, + face_x=None, + face_y=None, + ): + self._coord_manager.add( + node_x=node_x, + node_y=node_y, + edge_x=edge_x, + edge_y=edge_y, + face_x=face_x, + face_y=face_y, + ) + + # def add_connectivities(self, *args): + # # this supports adding a new connectivity to the manager, but also replacing an existing connectivity + # self._connectivity_manager.add(*args) + + # def connectivities( + # self, + # name_or_coord=None, + # standard_name=None, + # long_name=None, + # var_name=None, + # attributes=None, + # node=False, + # edge=False, + # face=False, + # ): + # pass + + # def connectivity(self, ...): + # pass + + def coord( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + node=None, + edge=None, + face=None, + ): + return self._coord_manager.filter( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + node=node, + edge=edge, + face=face, + ) + + def 
coords( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + node=False, + edge=False, + face=False, + ): + return self._coord_manager.filters( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + node=node, + edge=edge, + face=face, + ) + + # def remove_connectivities(self, ...): + # # needs to respect the minimum UGRID contract + # self._connectivity_manager.remove(...) + + def remove_coords( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + node=None, + edge=None, + face=None, + ): + self._coord_manager.remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + node=node, + edge=edge, + face=face, + ) + + def xml_element(self): + # TBD + pass + + # the MeshCoord will always have bounds, perhaps points. However the MeshCoord.guess_points() may + # be a very useful part of its behaviour. + # after using MeshCoord.guess_points(), the user may wish to add the associated MeshCoord.points into + # the Mesh as face_coordinates. + + # def to_AuxCoord(self, location, axis): + # # factory method + # # return the lazy AuxCoord(...) for the given location and axis + # + # def to_AuxCoords(self, location): + # # factory method + # # return the lazy AuxCoord(...), AuxCoord(...) + # + # def to_MeshCoord(self, location, axis): + # # factory method + # # return MeshCoord(..., location=location, axis=axis) + # # use Connectivity.indices_by_src() for fetching indices. + # + # def to_MeshCoords(self, location): + # # factory method + # # return MeshCoord(..., location=location, axis="x"), MeshCoord(..., location=location, axis="y") + # # use Connectivity.indices_by_src() for fetching indices. 
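+
+    # For illustration only (a sketch of the intended filtering behaviour,
+    # not exercised by this patch):
+    #
+    #     mesh.coord(item="longitude", node=True)  # exactly one member, else
+    #                                              # CoordinateNotFoundError
+    #     mesh.coords(node=True)                   # the node_x/node_y members
+    #     mesh.remove_coords("latitude")           # ignored (and logged), as
+    #                                              # node coords are required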
+
+    def dimension_names_reset(self, node=False, edge=False, face=False):
+        if node:
+            self.node_dimension = None
+        if edge:
+            self.edge_dimension = None
+        if face:
+            self.face_dimension = None
+        if self.topology_dimension == 1:
+            result = Mesh1DNames(self.node_dimension, self.edge_dimension)
+        else:
+            result = Mesh2DNames(
+                self.node_dimension, self.edge_dimension, self.face_dimension
+            )
+        return result
+
+    def dimension_names(self, node=None, edge=None, face=None):
+        if node:
+            self.node_dimension = node
+        if edge:
+            self.edge_dimension = edge
+        if face:
+            self.face_dimension = face
+        if self.topology_dimension == 1:
+            result = Mesh1DNames(self.node_dimension, self.edge_dimension)
+        else:
+            result = Mesh2DNames(
+                self.node_dimension, self.edge_dimension, self.face_dimension
+            )
+        return result
+
+    @property
+    def cf_role(self):
+        return "mesh_topology"
+
+    @property
+    def topology_dimension(self):
+        return self._metadata_manager.topology_dimension
+
+
+class _Mesh1DCoordinateManager:
+    """
+
+    TBD: require clarity on coord_systems validation
+    TBD: require clarity on __eq__ support
+    TBD: rationalise self.coords() logic with other manager and Cube
+
+    """
+
+    REQUIRED = (
+        "node_x",
+        "node_y",
+    )
+    OPTIONAL = (
+        "edge_x",
+        "edge_y",
+    )
+
+    def __init__(self, node_x, node_y, edge_x=None, edge_y=None):
+        # initialise all the coordinates
+        self.ALL = self.REQUIRED + self.OPTIONAL
+        self._members = {member: None for member in self.ALL}
+
+        # required coordinates
+        self.node_x = node_x
+        self.node_y = node_y
+        # optional coordinates
+        self.edge_x = edge_x
+        self.edge_y = edge_y
+
+    def __eq__(self, other):
+        # TBD
+        return NotImplemented
+
+    def __getstate__(self):
+        # TBD
+        pass
+
+    def __iter__(self):
+        for item in self._members.items():
+            yield item
+
+    def __ne__(self, other):
+        # TBD
+        return NotImplemented
+
+    def __repr__(self):
+        args = [
+            f"{member}={coord!r}"
+            for member, coord in self
+            if coord is not None
+        ]
+        return f"{self.__class__.__name__}({', '.join(args)})"
+
+    def __setstate__(self, state):
+        # TBD
+        pass
+
+    def __str__(self):
+        args = [
+            f"{member}=True" for member, coord in self if coord is not None
+        ]
+        return f"{self.__class__.__name__}({', '.join(args)})"
+
+    @staticmethod
+    def _filters(
+        members,
+        item=None,
+        standard_name=None,
+        long_name=None,
+        var_name=None,
+        attributes=None,
+        axis=None,
+    ):
+        """
+        TBD: support coord_systems?
+
+        """
+        name = None
+        coord = None
+
+        if isinstance(item, str):
+            name = item
+        else:
+            coord = item
+
+        if name is not None:
+            members = {k: v for k, v in members.items() if v.name() == name}
+
+        if standard_name is not None:
+            members = {
+                k: v
+                for k, v in members.items()
+                if v.standard_name == standard_name
+            }
+
+        if long_name is not None:
+            members = {
+                k: v for k, v in members.items() if v.long_name == long_name
+            }
+
+        if var_name is not None:
+            members = {
+                k: v for k, v in members.items() if v.var_name == var_name
+            }
+
+        if axis is not None:
+            axis = axis.upper()
+            members = {
+                k: v for k, v in members.items() if guess_coord_axis(v) == axis
+            }
+
+        if attributes is not None:
+            if not isinstance(attributes, Mapping):
+                emsg = (
+                    "The attributes keyword was expecting a dictionary "
+                    f"type, but got a {type(attributes)} instead."
+ ) + raise ValueError(emsg) + + def _filter(coord): + return all( + k in coord.attributes + and _hexdigest(coord.attributes[k]) == _hexdigest(v) + for k, v in attributes.items() + ) + + members = {k: v for k, v in members.items() if _filter(v)} + + if coord is not None: + if hasattr(coord, "__class__") and coord.__class__ in ( + CoordMetadata, + DimCoordMetadata, + ): + target_metadata = coord + else: + target_metadata = coord.metadata + + members = { + k: v + for k, v in members.items() + if v.metadata == target_metadata + } + + return members + + def _remove(self, **kwargs): + result = {} + members = self.filters(**kwargs) + + for member in members.keys(): + if member in self.REQUIRED: + dmsg = f"Ignoring request to remove required coordinate {member!r}" + logger.debug(dmsg, extra=dict(cls=self.__class__.__name__)) + else: + result[member] = members[member] + setattr(self, member, None) + + return result + + def _setter(self, location, axis, coord, shape): + axis = axis.lower() + member = f"{location}_{axis}" + + # enforce the UGRID minimum coordinate requirement + if location == "node" and coord is None: + emsg = ( + f"{member!r} is a required coordinate, cannot set to 'None'." + ) + raise ValueError(emsg) + + if coord is not None: + if not isinstance(coord, AuxCoord): + emsg = f"{member!r} requires to be an 'AuxCoord', got {type(coord)}." + raise TypeError(emsg) + + guess_axis = guess_coord_axis(coord) + + if guess_axis and guess_axis.lower() != axis: + emsg = f"{member!r} requires a {axis}-axis like 'AuxCoord', got a {guess_axis.lower()}-axis like." + raise TypeError(emsg) + + if coord.climatological: + emsg = f"{member!r} cannot be a climatological 'AuxCoord'." + raise TypeError(emsg) + + if shape is not None and coord.shape != shape: + emsg = f"{member!r} requires to have shape {shape!r}, got {coord.shape!r}." 
+ raise ValueError(emsg) + + self._members[member] = coord + + def _shape(self, location): + coord = getattr(self, f"{location}_x") + shape = coord.shape if coord is not None else None + if shape is None: + coord = getattr(self, f"{location}_y") + if coord is not None: + shape = coord.shape + return shape + + @property + def _edge_shape(self): + return self._shape(location="edge") + + @property + def _node_shape(self): + return self._shape(location="node") + + @property + def all_members(self): + return Mesh1DCoords(**self._members) + + @property + def edge_coords(self): + return MeshEdgeCoords(edge_x=self.edge_x, edge_y=self.edge_y) + + @property + def edge_x(self): + return self._members["edge_x"] + + @edge_x.setter + def edge_x(self, coord): + self._setter( + location="edge", axis="x", coord=coord, shape=self._edge_shape + ) + + @property + def edge_y(self): + return self._members["edge_y"] + + @edge_y.setter + def edge_y(self, coord): + self._setter( + location="edge", axis="y", coord=coord, shape=self._edge_shape + ) + + @property + def node_coords(self): + return MeshNodeCoords(node_x=self.node_x, node_y=self.node_y) + + @property + def node_x(self): + return self._members["node_x"] + + @node_x.setter + def node_x(self, coord): + self._setter( + location="node", axis="x", coord=coord, shape=self._node_shape + ) + + @property + def node_y(self): + return self._members["node_y"] + + @node_y.setter + def node_y(self, coord): + self._setter( + location="node", axis="y", coord=coord, shape=self._node_shape + ) + + def _add(self, coords): + member_x, member_y = coords._fields + + # deal with the special case where both members are changing + if coords[0] is not None and coords[1] is not None: + cache_x = self._members[member_x] + cache_y = self._members[member_y] + self._members[member_x] = None + self._members[member_y] = None + + try: + setattr(self, member_x, coords[0]) + setattr(self, member_y, coords[1]) + except (TypeError, ValueError): + # restore previous valid state + self._members[member_x] = cache_x + self._members[member_y] = cache_y + # now, re-raise the exception + raise + else: + # deal with the case where one or no member is changing + if coords[0] is not None: + setattr(self, member_x, coords[0]) + if coords[1] is not None: + setattr(self, member_y, coords[1]) + + def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): + """ + use self.remove(edge_x=True) to remove a coordinate e.g., using the + pattern self.add(edge_x=None) will not remove the edge_x coordinate + + """ + self._add(MeshNodeCoords(node_x, node_y)) + self._add(MeshEdgeCoords(edge_x, edge_y)) + + def filter(self, **kwargs): + result = self.filters(**kwargs) + + if len(result) > 1: + names = ", ".join( + f"{member}={coord!r}" for member, coord in result.items() + ) + emsg = ( + f"Expected to find exactly 1 coordinate, but found {len(result)}. " + f"They were: {names}." + ) + raise CoordinateNotFoundError(emsg) + + if len(result) == 0: + item = kwargs["item"] + if item is not None: + if not isinstance(item, str): + item = item.name() + name = ( + item + or kwargs["standard_name"] + or kwargs["long_name"] + or kwargs["var_name"] + or None + ) + name = "" if name is None else f"{name!r} " + emsg = ( + f"Expected to find exactly 1 {name}coordinate, but found none." 
+ ) + raise CoordinateNotFoundError(emsg) + + return result + + def filters( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + node=None, + edge=None, + face=None, + ): + # rationalise the tri-state behaviour + args = [node, edge, face] + state = not any(set(filter(lambda arg: arg is not None, args))) + node, edge, face = map( + lambda arg: arg if arg is not None else state, args + ) + + def func(args): + return args[1] is not None + + members = {} + if node: + members.update( + dict(filter(func, self.node_coords._asdict().items())) + ) + if edge: + members.update( + dict(filter(func, self.edge_coords._asdict().items())) + ) + if hasattr(self, "face_coords"): + if face: + members.update( + dict(filter(func, self.face_coords._asdict().items())) + ) + else: + dmsg = "Ignoring request to filter non-existent 'face_coords'" + logger.debug(dmsg, extra=dict(cls=self.__class__.__name__)) + + result = self._filters( + members, + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + ) + + return result + + def remove( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + node=None, + edge=None, + ): + return self._remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + node=node, + edge=edge, + ) + + +class _Mesh2DCoordinateManager(_Mesh1DCoordinateManager): + OPTIONAL = ( + "edge_x", + "edge_y", + "face_x", + "face_y", + ) + + def __init__( + self, + node_x, + node_y, + edge_x=None, + edge_y=None, + face_x=None, + face_y=None, + ): + super().__init__(node_x, node_y, edge_x=edge_x, edge_y=edge_y) + + # optional coordinates + self.face_x = face_x + self.face_y = face_y + + @property + def _face_shape(self): + return self._shape(location="face") + + @property + def all_members(self): + return Mesh2DCoords(**self._members) + + @property + def face_coords(self): + return MeshFaceCoords(face_x=self.face_x, face_y=self.face_y) + + @property + def face_x(self): + return self._members["face_x"] + + @face_x.setter + def face_x(self, coord): + self._setter( + location="face", axis="x", coord=coord, shape=self._face_shape + ) + + @property + def face_y(self): + return self._members["face_y"] + + @face_y.setter + def face_y(self, coord): + self._setter( + location="face", axis="y", coord=coord, shape=self._face_shape + ) + + def add( + self, + node_x=None, + node_y=None, + edge_x=None, + edge_y=None, + face_x=None, + face_y=None, + ): + super().add(node_x=node_x, node_y=node_y, edge_x=edge_x, edge_y=edge_y) + self._add(MeshFaceCoords(face_x, face_y)) + + def remove( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + node=None, + edge=None, + face=None, + ): + return self._remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + node=node, + edge=edge, + face=face, + ) + + # # keep an eye on the __init__ inheritance # class _Mesh1DConnectivityManager: # REQUIRED = ( From eb7dfc3e5da88fe0e2ff1a48b8ce6950e7fb9c9a Mon Sep 17 00:00:00 2001 From: Bill Little Date: Mon, 22 Feb 2021 14:50:20 +0000 Subject: [PATCH 07/22] Mesh data model to ng vat mesh api (#4023) * Update mesh-data-model branch (#4009) * Add abstract cube summary (#3987) Co-authored-by: stephen.worsley * add nox session conda list (#3990) * Added 
text to state the Python version used to build the docs. (#3989) * Added text to state the Python version used to build the docs. * Added footer template that includes the Python version used to build. * added new line * Review actions * added whatsnew * Iris py38 (#3976) * support for py38 * update CI and noxfile * enforce alphabetical xml element attribute order * full tests for py38 + fix docs-tests * add whatsnew entry * update doc-strings + review actions * Alternate xml handling routine (#29) * all xml tests pass for nox tests-3.8 * restored docstrings * move sort_xml_attrs * make sort_xml_attrs a classmethod * update sort_xml_attr doc-string Co-authored-by: Bill Little * add jamesp to whatsnew + minor tweak Co-authored-by: James Penn * normalise version to implicit development release number (#3991) * Gallery: update COP maps example (#3934) * update cop maps example * comment tweaks * minor comment tweak + whatsnew * reinstate whatsnew addition * remove duplicate whatsnew * don't support mpl v1.2 (#3941) * Cubesummary tidy (#3988) * Extra tests; fix for array attributes. * Docstring for CubeSummary, and remove some unused parts. * Fix section name capitalisation, in line with existing cube summary. * Handle array differences; quote strings in extras and if 'awkward'-printing. * Ensure scalar string coord 'content' prints on one line. * update intersphinx mapping and matplotlib urls (#4003) * update intersphinx mapping and matplotlib urls * use matplotlib intersphinx where possible * review actions * review actions * update readme badges (#4004) * update readme badges * pimp twitter badge * update readme logo img src and href (#4006) * update setuptools description (#4008) Co-authored-by: Patrick Peglar Co-authored-by: stephen.worsley Co-authored-by: tkknight <2108488+tkknight@users.noreply.github.com> Co-authored-by: James Penn Co-authored-by: Ruth Comer * Master to mesh data model (#4022) * Add abstract cube summary (#3987) Co-authored-by: stephen.worsley * add nox session conda list (#3990) * Added text to state the Python version used to build the docs. (#3989) * Added text to state the Python version used to build the docs. * Added footer template that includes the Python version used to build. * added new line * Review actions * added whatsnew * Iris py38 (#3976) * support for py38 * update CI and noxfile * enforce alphabetical xml element attribute order * full tests for py38 + fix docs-tests * add whatsnew entry * update doc-strings + review actions * Alternate xml handling routine (#29) * all xml tests pass for nox tests-3.8 * restored docstrings * move sort_xml_attrs * make sort_xml_attrs a classmethod * update sort_xml_attr doc-string Co-authored-by: Bill Little * add jamesp to whatsnew + minor tweak Co-authored-by: James Penn * normalise version to implicit development release number (#3991) * Gallery: update COP maps example (#3934) * update cop maps example * comment tweaks * minor comment tweak + whatsnew * reinstate whatsnew addition * remove duplicate whatsnew * don't support mpl v1.2 (#3941) * Cubesummary tidy (#3988) * Extra tests; fix for array attributes. * Docstring for CubeSummary, and remove some unused parts. * Fix section name capitalisation, in line with existing cube summary. * Handle array differences; quote strings in extras and if 'awkward'-printing. * Ensure scalar string coord 'content' prints on one line. 
* update intersphinx mapping and matplotlib urls (#4003) * update intersphinx mapping and matplotlib urls * use matplotlib intersphinx where possible * review actions * review actions * update readme badges (#4004) * update readme badges * pimp twitter badge * update readme logo img src and href (#4006) * update setuptools description (#4008) * cirrus-ci compute credits (#4007) * update release process (#4010) * Stop using deprecated aliases of builtin types (#3997) * Stopped using deprecated aliases of builtin types. This is required to avoid warnings starting with NumPy 1.20.0. * Update lib/iris/tests/test_cell.py Co-authored-by: Bill Little * Update lib/iris/tests/test_cell.py Co-authored-by: Bill Little * Updated whatsnew. Co-authored-by: Bill Little * celebrate first time iris contributors (#4013) * Docs unreleased banner (#3999) * baseline * removed debug comments * reverted * remove line * Testing * testing extensions * testing rtd_version * fixed if * removed line * tidy up * tidy comments * debug of pre-existing rtd variables * added reminder * testing * testing still * updated comments * added whatsnew * expanded the if conditiion * review actions * Update layout.html Remove alternative banner that used the RestructuredText notation. * review actions * drop __unicode__ method usage (#4018) * cirrus-ci conditional tasks (#4019) * cirrus-ci conditional tasks * use bc for bash arithmetic * revert back to sed * use expr * reword * minor documentation changes * review actions * make iris.common.metadata._hexdigest public (#4020) Co-authored-by: Patrick Peglar Co-authored-by: stephen.worsley Co-authored-by: tkknight <2108488+tkknight@users.noreply.github.com> Co-authored-by: James Penn Co-authored-by: Ruth Comer Co-authored-by: Alexander Kuhn-Regnier Co-authored-by: Patrick Peglar Co-authored-by: stephen.worsley Co-authored-by: tkknight <2108488+tkknight@users.noreply.github.com> Co-authored-by: James Penn Co-authored-by: Ruth Comer Co-authored-by: Alexander Kuhn-Regnier --- .cirrus.yml | 80 ++++++++++----- docs/src/_static/theme_override.css | 14 +++ docs/src/_templates/layout.html | 22 +++++ docs/src/common_links.inc | 2 + docs/src/conf.py | 10 +- .../contributing_ci_tests.rst | 74 ++++++++++---- .../contributing_code_formatting.rst | 2 - docs/src/developers_guide/release.rst | 97 +++++++++++-------- docs/src/whatsnew/latest.rst | 28 ++++++ lib/iris/_constraints.py | 2 +- lib/iris/_representation.py | 4 +- lib/iris/analysis/__init__.py | 2 +- lib/iris/analysis/_regrid.py | 2 +- lib/iris/common/metadata.py | 67 +++++++------ lib/iris/cube.py | 3 - lib/iris/experimental/regrid.py | 4 +- lib/iris/fileformats/_ff.py | 2 +- lib/iris/fileformats/pp.py | 2 +- lib/iris/fileformats/pp_load_rules.py | 2 +- lib/iris/tests/__init__.py | 12 +-- .../test_regrid_conservative_via_esmpy.py | 6 +- lib/iris/tests/test_cell.py | 4 +- lib/iris/tests/test_concatenate.py | 8 +- lib/iris/tests/test_merge.py | 4 +- .../analysis/cartography/test_rotate_winds.py | 2 +- .../regrid/test_RectilinearRegridder.py | 30 +++--- .../unit/aux_factory/test_OceanSFactory.py | 4 +- .../unit/aux_factory/test_OceanSg1Factory.py | 4 +- .../unit/aux_factory/test_OceanSg2Factory.py | 4 +- .../aux_factory/test_OceanSigmaFactory.py | 4 +- .../aux_factory/test_OceanSigmaZFactory.py | 8 +- .../unit/common/metadata/test_BaseMetadata.py | 4 +- .../{test__hexdigest.py => test_hexdigest.py} | 24 ++--- ...rid_weighted_curvilinear_to_rectilinear.py | 4 +- .../unit/fileformats/cf/test_CFReader.py | 46 +++++---- 
.../tests/unit/fileformats/pp/test_PPField.py | 8 +- .../pp/test__data_bytes_to_shaped_array.py | 4 +- .../unit/lazy_data/test_lazy_elementwise.py | 2 +- 38 files changed, 383 insertions(+), 218 deletions(-) rename lib/iris/tests/unit/common/metadata/{test__hexdigest.py => test_hexdigest.py} (90%) diff --git a/.cirrus.yml b/.cirrus.yml index da425a5691..0a7c972821 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -1,5 +1,6 @@ # Reference: # - https://cirrus-ci.org/guide/writing-tasks/ +# - https://cirrus-ci.org/guide/writing-tasks/#environment-variables # - https://cirrus-ci.org/guide/tips-and-tricks/#sharing-configuration-between-tasks # - https://cirrus-ci.org/guide/linux/ # - https://cirrus-ci.org/guide/macOS/ @@ -17,6 +18,16 @@ container: env: + # Skip specific tasks by name. Set to a non-empty string to skip. + SKIP_LINT_TASK: "" + SKIP_TEST_MINIMAL_TASK: "" + SKIP_TEST_FULL_TASK: "" + SKIP_GALLERY_TASK: "" + SKIP_DOCTEST_TASK: "" + SKIP_LINKCHECK_TASK: "" + # Skip task groups by type. Set to a non-empty string to skip. + SKIP_ALL_TEST_TASKS: "" + SKIP_ALL_DOC_TASKS: "" # Maximum cache period (in weeks) before forcing a new cache upload. CACHE_PERIOD: "2" # Increment the build number to force new cartopy cache upload. @@ -35,25 +46,6 @@ env: IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data -# -# Linting -# -lint_task: - auto_cancellation: true - name: "${CIRRUS_OS}: flake8 and black" - pip_cache: - folder: ~/.cache/pip - fingerprint_script: - - echo "${CIRRUS_TASK_NAME}" - - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${PIP_CACHE_BUILD} ${PIP_CACHE_PACKAGES}" - lint_script: - - pip list - - python -m pip install --retries 3 --upgrade ${PIP_CACHE_PACKAGES} - - pip list - - nox --session flake8 - - nox --session black - - # # YAML alias for common linux test infra-structure. 
# @@ -68,7 +60,7 @@ linux_task_template: &LINUX_TASK_TEMPLATE fingerprint_script: - wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh - echo "${CIRRUS_OS} $(sha256sum miniconda.sh)" - - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${CONDA_CACHE_BUILD}" + - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CONDA_CACHE_BUILD}" populate_script: - bash miniconda.sh -b -p ${HOME}/miniconda - conda config --set always_yes yes --set changeps1 no @@ -80,19 +72,49 @@ linux_task_template: &LINUX_TASK_TEMPLATE folder: ${HOME}/.local/share/cartopy fingerprint_script: - echo "${CIRRUS_OS}" - - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${CARTOPY_CACHE_BUILD}" + - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CARTOPY_CACHE_BUILD}" nox_cache: folder: ${CIRRUS_WORKING_DIR}/.nox fingerprint_script: - echo "${CIRRUS_TASK_NAME}" - - echo "$(date +%Y).$(($(date +%U) / ${CACHE_PERIOD})):${NOX_CACHE_BUILD}" + - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${NOX_CACHE_BUILD}" - sha256sum ${CIRRUS_WORKING_DIR}/requirements/ci/py$(echo ${PY_VER} | tr -d ".").yml +# +# YAML alias for compute credits +# +compute_credits_template: &CREDITS_TEMPLATE + # Only use credits for non-DRAFT pull-requests to SciTools/iris master branch by collaborators + use_compute_credits: $CIRRUS_REPO_FULL_NAME == 'SciTools/iris' && $CIRRUS_USER_COLLABORATOR == 'true' && $CIRRUS_PR_DRAFT == 'false' && $CIRRUS_BASE_BRANCH == 'master' && $CIRRUS_PR != '' + +# +# Linting +# +lint_task: + only_if: $SKIP_LINT_TASK == "" + << : *CREDITS_TEMPLATE + auto_cancellation: true + name: "${CIRRUS_OS}: flake8 and black" + pip_cache: + folder: ~/.cache/pip + fingerprint_script: + - echo "${CIRRUS_TASK_NAME}" + - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${PIP_CACHE_BUILD} ${PIP_CACHE_PACKAGES}" + lint_script: + - pip list + - python -m pip install --retries 3 --upgrade ${PIP_CACHE_PACKAGES} + - pip list + - nox --session flake8 + - nox --session black + + # # Testing Minimal (Linux) # -linux_minimal_task: +test_minimal_task: + only_if: $SKIP_TEST_MINIMAL_TASK == "" && $SKIP_ALL_TEST_TASKS == "" + << : *CREDITS_TEMPLATE matrix: env: PY_VER: 3.6 @@ -115,7 +137,9 @@ linux_minimal_task: # # Testing Full (Linux) # -linux_task: +test_full_task: + only_if: $SKIP_TEST_FULL_TASK == "" && $SKIP_ALL_TEST_TASKS == "" + << : *CREDITS_TEMPLATE matrix: env: PY_VER: 3.6 @@ -148,6 +172,8 @@ linux_task: # Testing Documentation Gallery (Linux) # gallery_task: + only_if: $SKIP_GALLERY_TASK == "" && $SKIP_ALL_DOC_TASKS == "" + << : *CREDITS_TEMPLATE matrix: env: PY_VER: 3.8 @@ -176,6 +202,8 @@ gallery_task: # Testing Documentation (Linux) # doctest_task: + only_if: $SKIP_DOCTEST_TASK == "" && $SKIP_ALL_DOC_TASKS == "" + << : *CREDITS_TEMPLATE matrix: env: PY_VER: 3.8 @@ -209,7 +237,9 @@ doctest_task: # # Testing Documentation Link Check (Linux) # -link_task: +linkcheck_task: + only_if: $SKIP_LINKCHECK_TASK == "" && $SKIP_ALL_DOC_TASKS == "" + << : *CREDITS_TEMPLATE matrix: env: PY_VER: 3.8 diff --git a/docs/src/_static/theme_override.css b/docs/src/_static/theme_override.css index 5edc286630..c56b720f69 100644 --- a/docs/src/_static/theme_override.css +++ b/docs/src/_static/theme_override.css @@ -26,3 +26,17 @@ table.docutils td { word-wrap: break-word; } +/* Used for very strong warning */ +#slim-red-box-message { + background: #ff0000; + box-sizing: border-box; + color: #ffffff; + font-weight: normal; + padding: 0.5em; +} + +#slim-red-box-message a { + color: 
#ffffff; + font-weight: normal; + text-decoration:underline; +} diff --git a/docs/src/_templates/layout.html b/docs/src/_templates/layout.html index 9b4983697e..96a2e0913e 100644 --- a/docs/src/_templates/layout.html +++ b/docs/src/_templates/layout.html @@ -1,5 +1,27 @@ {% extends "!layout.html" %} +{# This uses blocks. See: + https://www.sphinx-doc.org/en/master/templating.html +#} + +/*---------------------------------------------------------------------------*/ + +{%- block document %} + {% if READTHEDOCS and rtd_version == 'latest' %} +

+    <div id="slim-red-box-message">
+      You are viewing the latest unreleased documentation
+      <strong>v{{ version }}</strong>. You may prefer a
+      <a href="https://scitools-iris.readthedocs.io/en/stable/">stable</a>
+      version.
+    </div>
+
+ {%- endif %} + + {{ super() }} +{%- endblock %} + +/*-----------------------------------------------------z----------------------*/ + {% block menu %} {{ super() }} diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index 3c465b67dc..d9df15be8b 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -3,6 +3,7 @@ .. _black: https://black.readthedocs.io/en/stable/ .. _.cirrus.yml: https://github.com/SciTools/iris/blob/master/.cirrus.yml +.. _flake8: https://flake8.pycqa.org/en/stable/ .. _.flake8.yml: https://github.com/SciTools/iris/blob/master/.flake8 .. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris .. _conda: https://docs.conda.io/en/latest/ @@ -24,6 +25,7 @@ .. _New Issue: https://github.com/scitools/iris/issues/new/choose .. _pull request: https://github.com/SciTools/iris/pulls .. _pull requests: https://github.com/SciTools/iris/pulls +.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/ .. _readthedocs.yml: https://github.com/SciTools/iris/blob/master/requirements/ci/readthedocs.yml .. _SciTools: https://github.com/SciTools .. _sphinx: https://www.sphinx-doc.org/en/master/ diff --git a/docs/src/conf.py b/docs/src/conf.py index 9bab5850b8..ab05312fca 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -43,6 +43,9 @@ def autolog(message): for item, value in os.environ.items(): autolog("[READTHEDOCS] {} = {}".format(item, value)) +# This is the rtd reference to the version, such as: latest, stable, v3.0.1 etc +# For local testing purposes this could be explicitly set latest or stable. +rtd_version = os.environ.get("READTHEDOCS_VERSION") # -- Path setup -------------------------------------------------------------- @@ -131,7 +134,6 @@ def autolog(message): "custom_data_autodoc", "generate_package_rst", ] - # -- panels extension --------------------------------------------------------- # See https://sphinx-panels.readthedocs.io/en/latest/ @@ -165,7 +167,7 @@ def autolog(message): # See https://sphinx-copybutton.readthedocs.io/en/latest/ copybutton_prompt_text = ">>> " -# sphinx.ext.todo configuration +# sphinx.ext.todo configuration ----------------------------------------------- # See https://www.sphinx-doc.org/en/master/usage/extensions/todo.html todo_include_todos = True @@ -228,6 +230,8 @@ def autolog(message): } html_context = { + "rtd_version": rtd_version, + "version": version, "copyright_years": copyright_years, "python_version": build_python_version, # menu_links and menu_links_name are used in _templates/layout.html @@ -296,7 +300,6 @@ def autolog(message): "ignore_pattern": r"__init__\.py", } - # ----------------------------------------------------------------------------- # Remove matplotlib agg warnings from generated doc when using plt.show warnings.filterwarnings( @@ -306,7 +309,6 @@ def autolog(message): " non-GUI backend, so cannot show the figure.", ) - # -- numfig options (built-in) ------------------------------------------------ # Enable numfig. numfig = True diff --git a/docs/src/developers_guide/contributing_ci_tests.rst b/docs/src/developers_guide/contributing_ci_tests.rst index a6bdac4ae0..8594612fe1 100644 --- a/docs/src/developers_guide/contributing_ci_tests.rst +++ b/docs/src/developers_guide/contributing_ci_tests.rst @@ -5,9 +5,9 @@ Continuous Integration (CI) Testing =================================== -The `Iris`_ GitHub repository is configured to run checks on the code -automatically when a pull request is created, updated or merged against -Iris **master**. 
The checks performed are: +The `Iris`_ GitHub repository is configured to run checks against all its +branches automatically whenever a pull request is created, updated or merged. +The checks performed are: * :ref:`testing_cla` * :ref:`testing_cirrus` @@ -18,9 +18,9 @@ Iris **master**. The checks performed are: SciTools CLA Checker ******************** -A bot that checks the user who created the pull request has signed the -**Contributor's License Agreement (CLA)**. For more information on this this -please see https://scitools.org.uk/organisation.html#governance +A bot which checks that the GitHub author of the pull request has signed the +**SciTools Contributor's License Agreement (CLA)**. For more information on +this please see https://scitools.org.uk/organisation.html#governance. .. _testing_cirrus: @@ -28,19 +28,55 @@ please see https://scitools.org.uk/organisation.html#governance Cirrus-CI ********* -The unit and integration tests in Iris are an essential mechanism to ensure +Iris unit and integration tests are an essential mechanism to ensure that the Iris code base is working as expected. :ref:`developer_running_tests` -may be run manually but to ensure the checks are performed a -continuous integration testing tool named `cirrus-ci`_ is used. +may be performed manually by a developer locally. However Iris is configured to +use the `cirrus-ci`_ service for automated Continuous Integration (CI) testing. -A `cirrus-ci`_ configuration file named `.cirrus.yml`_ -is in the Iris repository which tells Cirrus-CI what commands to run. The -commands include retrieving the Iris code base and associated test files using -conda and then running the tests. `cirrus-ci`_ allows for a matrix of tests to -be performed to ensure that all expected variations test successfully. +The `cirrus-ci`_ configuration file `.cirrus.yml`_ in the root of the Iris repository +defines the tasks to be performed by `cirrus-ci`_. For further details +refer to the `Cirrus-CI Documentation`_. The tasks performed during CI include: + +* linting the code base and ensuring it adheres to the `black`_ format +* running the system, integration and unit tests for Iris +* ensuring the documentation gallery builds successfully +* performing all doc-tests within the code base +* checking all URL references within the code base and documentation are valid + +The above `cirrus-ci`_ tasks are run automatically against all `Iris`_ branches +on GitHub whenever a pull request is submitted, updated or merged. See the +`Cirrus-CI Dashboard`_ for details of recent past and active Iris jobs. + +.. _skipping Cirrus-CI tasks: + +Skipping Cirrus-CI Tasks +------------------------ + +As a developer you may wish to not run all the CI tasks when you are actively +developing e.g., you are writing documentation and there is no need for linting, +or long running compute intensive testing tasks to be executed. 
+ +As a convenience, it is possible to easily skip one or more tasks by setting +the appropriate environment variable within the `.cirrus.yml`_ file to a +**non-empty** string: + +* ``SKIP_LINT_TASK`` to skip `flake8`_ linting and `black`_ formatting +* ``SKIP_TEST_MINIMAL_TASK`` to skip restricted unit and integration testing +* ``SKIP_TEST_FULL_TASK`` to skip full unit and integration testing +* ``SKIP_GALLERY_TASK`` to skip building the documentation gallery +* ``SKIP_DOCTEST_TASK`` to skip running the documentation doc-tests +* ``SKIP_LINKCHECK_TASK`` to skip checking for broken documentation URL references +* ``SKIP_ALL_TEST_TASKS`` which is equivalent to setting ``SKIP_TEST_MINIMAL_TASK`` and ``SKIP_TEST_FULL_TASK`` +* ``SKIP_ALL_DOC_TASKS`` which is equivalent to setting ``SKIP_GALLERY_TASK``, ``SKIP_DOCTEST_TASK``, and ``SKIP_LINKCHECK_TASK`` + +e.g., to skip the linting task, the following are all equivalent:: + + SKIP_LINT_TASK: "1" + SKIP_LINT_TASK: "true" + SKIP_LINT_TASK: "false" + SKIP_LINT_TASK: "skip" + SKIP_LINT_TASK: "unicorn" -The `cirrus-ci`_ tests are run automatically against the `Iris`_ master -repository when a pull request is submitted, updated or merged. GitHub Checklist **************** @@ -50,6 +86,10 @@ passing: .. image:: ci_checks.png -If any CI checks fail, then the pull request is unlikely to be merged to the +If any CI tasks fail, then the pull request is unlikely to be merged to the Iris target branch by a core developer. + +.. _Cirrus-CI Dashboard: https://cirrus-ci.com/github/SciTools/iris +.. _Cirrus-CI Documentation: https://cirrus-ci.org/guide/writing-tasks/ + diff --git a/docs/src/developers_guide/contributing_code_formatting.rst b/docs/src/developers_guide/contributing_code_formatting.rst index 6bf8dca717..1a3573d135 100644 --- a/docs/src/developers_guide/contributing_code_formatting.rst +++ b/docs/src/developers_guide/contributing_code_formatting.rst @@ -58,6 +58,4 @@ will look similar to:: their officially documentation for more information. -.. _black: https://black.readthedocs.io/en/stable/ -.. _flake8: https://flake8.pycqa.org/en/stable/ .. _pre-commit: https://pre-commit.com/ diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 56328f910f..90938b32d3 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -3,8 +3,7 @@ Releases ======== -A release of Iris is a `tag on the SciTools/Iris`_ -Github repository. +A release of Iris is a `tag on the SciTools/Iris`_ Github repository. The summary below is of the main areas that constitute the release. The final section details the :ref:`iris_development_releases_steps` to take. @@ -24,8 +23,8 @@ number of releases, is in :ref:`iris_development_deprecations`. Release Branch -------------- -Once the features intended for the release are on master, a release branch -should be created, in the SciTools/Iris repository. This will have the name: +Once the features intended for the release are on ``master``, a release branch +should be created, in the ``SciTools/iris`` repository. 
This will have the name: :literal:`v{major release number}.{minor release number}.x` @@ -41,10 +40,10 @@ Release Candidate ----------------- Prior to a release, a release candidate tag may be created, marked as a -pre-release in github, with a tag ending with :literal:`rc` followed by a -number, e.g.: +pre-release in GitHub, with a tag ending with :literal:`rc` followed by a +number (0-based), e.g.,: - :literal:`v1.9.0rc1` + :literal:`v1.9.0rc0` If created, the pre-release shall be available for a minimum of two weeks prior to the release being cut. However a 4 week period should be the goal @@ -57,11 +56,16 @@ point release. If new features are required for a release after a release candidate has been cut, a new pre-release shall be issued first. +Make the release candidate available as a conda package on the +`conda-forge Anaconda channel`_ using the `rc_iris`_ label. To do this visit +the `conda-forge iris-feedstock`_ and follow `CFEP-05`_. For further information +see the `conda-forge User Documentation`_. + Documentation ------------- -The documentation should include all of the what's new entries for the release. +The documentation should include all of the ``whatsnew`` entries for the release. This content should be reviewed and adapted as required. Steps to achieve this can be found in the :ref:`iris_development_releases_steps`. @@ -70,50 +74,48 @@ Steps to achieve this can be found in the :ref:`iris_development_releases_steps` The Release ----------- -The final steps are to change the version string in the source of -:literal:`Iris.__init__.py` and include the release date in the relevant what's -new page within the documentation. +The final steps of the release are to change the version string ``__version__`` +in the source of :literal:`iris.__init__.py` and ensure the release date and details +are correct in the relevant ``whatsnew`` page within the documentation. Once all checks are complete, the release is cut by the creation of a new tag -in the SciTools Iris repository. +in the ``SciTools/iris`` repository. Conda Recipe ------------ -Once a release is cut, the `Iris feedstock`_ for the conda recipe must be -updated to build the latest release of Iris and push this artefact to -`conda forge`_. +Once a release is cut on GitHub, update the Iris conda recipe on the +`conda-forge iris-feedstock`_ for the release. This will build and publish the +conda package on the `conda-forge Anaconda channel`_. -.. _Iris feedstock: https://github.com/conda-forge/iris-feedstock/tree/master/recipe -.. _conda forge: https://anaconda.org/conda-forge/iris Merge Back ---------- -After the release is cut, the changes shall be merged back onto the -Scitools/iris master branch. +After the release is cut, the changes from the release branch should be merged +back onto the ``SciTools/iris`` ``master`` branch. -To achieve this, first cut a local branch from the release branch, -:literal:`{release}.x`. Next add a commit changing the release string to match -the release string on scitools/master. This branch can now be proposed as a -pull request to master. This work flow ensures that the commit identifiers are -consistent between the :literal:`.x` branch and :literal:`master`. +To achieve this, first cut a local branch from the latest ``master`` branch, +and `git merge` the :literal:`.x` release branch into it. 
Ensure that the +``iris.__version__``, ``docs/src/whatsnew/index.rst`` and ``docs/src/whatsnew/latest.rst`` +are correct, before committing these changes and then proposing a pull-request +on the ``master`` branch of ``SciTools/iris``. Point Releases -------------- -Bug fixes may be implemented and targeted as the :literal:`.x` branch. These -should lead to a new point release, another tag. For example, a fix for a -problem with 1.9.0 will be merged into 1.9.x, and then released by tagging -1.9.1. +Bug fixes may be implemented and targeted on the :literal:`.x` release branch. +These should lead to a new point release, and another tag. For example, a fix +for a problem with the ``v1.9.0`` release will be merged into ``v1.9.x`` release +branch, and then released by tagging ``v1.9.1``. New features shall not be included in a point release, these are for bug fixes. A point release does not require a release candidate, but the rest of the release process is to be followed, including the merge back of changes into -:literal:`master`. +``master``. .. _iris_development_releases_steps: @@ -121,19 +123,19 @@ release process is to be followed, including the merge back of changes into Maintainer Steps ---------------- -These steps assume a release for ``v1.9`` is to be created. +These steps assume a release for ``1.9.0`` is to be created. Release Steps ~~~~~~~~~~~~~ -#. Create the release feature branch ``1.9.x`` on `SciTools/iris`_. +#. Create the release feature branch ``v1.9.x`` on `SciTools/iris`_. The only exception is for a point/bugfix release, as it should already exist #. Update the ``iris.__init__.py`` version string e.g., to ``1.9.0`` -#. Update the what's new for the release: +#. Update the ``whatsnew`` for the release: - * Use git to rename ``docs/src/whatsnew/latest.rst`` to the release + * Use ``git`` to rename ``docs/src/whatsnew/latest.rst`` to the release version file ``v1.9.rst`` - * Use git to delete the ``docs/src/whatsnew/latest.rst.template`` file + * Use ``git`` to delete the ``docs/src/whatsnew/latest.rst.template`` file * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title. Note that, the Iris version and release date are updated automatically when the documentation is built @@ -141,10 +143,10 @@ Release Steps * Work with the development team to populate the ``Release Highlights`` dropdown at the top of the file, which provides extra detail on notable changes - * Use git to add and commit all changes, including removal of + * Use ``git`` to add and commit all changes, including removal of ``latest.rst.template`` -#. Update the what's new index ``docs/src/whatsnew/index.rst`` +#. Update the ``whatsnew`` index ``docs/src/whatsnew/index.rst`` * Remove the reference to ``latest.rst`` * Add a reference to ``v1.9.rst`` to the top of the list @@ -159,20 +161,31 @@ Post Release Steps ~~~~~~~~~~~~~~~~~~ #. Check the documentation has built on `Read The Docs`_. The build is - triggered by any commit to master. Additionally check that the versions + triggered by any commit to ``master``. Additionally check that the versions available in the pop out menu in the bottom left corner include the new release version. If it is not present you will need to configure the - versions available in the **admin** dashboard in Read The Docs + versions available in the **admin** dashboard in `Read The Docs`_. +#. Review the `Active Versions`_ for the ``scitools-iris`` project on + `Read The Docs`_ to ensure that the appropriate versions are ``Active`` + and/or ``Hidden``. 
To do this ``Edit`` the appropriate version e.g., + see `Editing v3.0.0rc0`_. #. Copy ``docs/src/whatsnew/latest.rst.template`` to ``docs/src/whatsnew/latest.rst``. This will reset - the file with the ``unreleased`` heading and placeholders for the what's - new headings -#. Add back in the reference to ``latest.rst`` to the what's new index + the file with the ``unreleased`` heading and placeholders for the + ``whatsnew`` headings +#. Add back in the reference to ``latest.rst`` to the ``whatsnew`` index ``docs/src/whatsnew/index.rst`` #. Update ``iris.__init__.py`` version string to show as ``1.10.dev0`` -#. Merge back to master +#. Merge back to ``master`` .. _Read The Docs: https://readthedocs.org/projects/scitools-iris/builds/ .. _SciTools/iris: https://github.com/SciTools/iris .. _tag on the SciTools/Iris: https://github.com/SciTools/iris/releases +.. _conda-forge Anaconda channel: https://anaconda.org/conda-forge/iris +.. _conda-forge iris-feedstock: https://github.com/conda-forge/iris-feedstock +.. _CFEP-05: https://github.com/conda-forge/cfep/blob/master/cfep-05.md +.. _conda-forge User Documentation: https://conda-forge.org/docs/user/00_intro.html +.. _Active Versions: https://readthedocs.org/projects/scitools-iris/versions/ +.. _Editing v3.0.0rc0: https://readthedocs.org/dashboard/scitools-iris/version/v3.0.0rc0/ +.. _rc_iris: https://anaconda.org/conda-forge/iris/labels diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 68872beb64..6fcdcfb7bc 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -27,6 +27,11 @@ This document explains the changes made to Iris for this release #. Congratulations to `@jamesp`_ who recently became an Iris core developer after joining the Iris development team at the `Met Office`_. 🎉 +#. A special thanks goes to `@akuhnregnier`_, `@gcaria`_, `@jamesp`_ and + `@MHBalsmeier`_ all of whom made their first contributions to Iris, which + were gratefully received and included in this release. Keep up the awesome + work! 🍻 + ✨ Features =========== @@ -37,6 +42,12 @@ This document explains the changes made to Iris for this release ``iris.plot.plot(z_cube)`` will produce a z-vs-phenomenon plot, where before it would have produced a phenomenon-vs-z plot. (:pull:`3906`) +#. `@bjlittle`_ introduced :func:`iris.common.metadata.hexdigest` to the + public API. Previously it was a private function introduced in ``v3.0.0``. + Given any object, :func:`~iris.common.metadata.hexdigest` returns a string + representation of the 64-bit non-cryptographic hash of the object using the + extremely fast `xxhash`_ hashing algorithm. (:pull:`4020`) + 🐛 Bugs Fixed ============= @@ -91,6 +102,10 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ added the |PyPI|_ badge to the `README.md`_. (:pull:`4004`) +#. `@tkknight`_ added a banner at the top of every page of the unreleased + development documentation if being viewed on `Read the Docs`_. + (:pull:`3999`) + 💼 Internal =========== @@ -109,11 +124,22 @@ This document explains the changes made to Iris for this release each ``nox`` session to list its ``conda`` environment packages and environment info. (:pull:`3990`) +#. `@bjlittle`_ enabled `cirrus-ci`_ compute credits for non-draft pull-requests + from collaborators targeting the Iris ``master`` branch. (:pull:`4007`) + +#. `@akuhnregnier`_ replaced `deprecated numpy 1.20 aliases for builtin types`_. + (:pull:`3997`) + +#. 
`@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow + developers to easily disable `cirrus-ci`_ tasks. See + :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: +.. _@akuhnregnier: https://github.com/akuhnregnier .. _@gcaria: https://github.com/gcaria .. _@MHBalsmeier: https://github.com/MHBalsmeier @@ -122,6 +148,7 @@ This document explains the changes made to Iris for this release Whatsnew resources in alphabetical order: .. _abstract base class: https://docs.python.org/3/library/abc.html +.. _deprecated numpy 1.20 aliases for builtin types: https://numpy.org/doc/1.20/release/1.20.0-notes.html#using-the-aliases-of-builtin-types-like-np-int-is-deprecated .. _GitHub: https://github.com/SciTools/iris/issues/new/choose .. _Met Office: https://www.metoffice.gov.uk/ .. _numpy: https://numpy.org/doc/stable/release/1.20.0-notes.html @@ -129,3 +156,4 @@ This document explains the changes made to Iris for this release .. _PyPI: https://pypi.org/project/scitools-iris/ .. _Python 3.8: https://www.python.org/downloads/release/python-380/ .. _README.md: https://github.com/SciTools/iris#----- +.. _xxhash: http://cyan4973.github.io/xxHash/ diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 0f6a8ab6c6..5179d89039 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -294,7 +294,7 @@ def call_func(c): except TypeError: try_quick = False if try_quick: - r = np.zeros(coord.shape, dtype=np.bool) + r = np.zeros(coord.shape, dtype=np.bool_) if coord.cell(i) == self._coord_thing: r[i] = True else: diff --git a/lib/iris/_representation.py b/lib/iris/_representation.py index ee1e1a0d55..63974d1e50 100644 --- a/lib/iris/_representation.py +++ b/lib/iris/_representation.py @@ -9,7 +9,7 @@ import re import iris.util -from iris.common.metadata import _hexdigest as quickhash +from iris.common.metadata import hexdigest class DimensionHeader: @@ -101,7 +101,7 @@ def _summary_coord_extra(self, cube, coord): # ..except setdefault fails if values are numpy arrays. if key not in attributes: attributes[key] = value - elif quickhash(attributes[key]) != quickhash(value): + elif hexdigest(attributes[key]) != hexdigest(value): # NOTE: fast and array-safe comparison, as used in # :mod:`iris.common.metadata`. vary.add(key) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index a049d060c2..cf9f258be1 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1454,7 +1454,7 @@ def interp_order(length): slices[-1] = endslice slices = tuple(slices) # Numpy>=1.16 : index with tuple, *not* list. - if isinstance(array.dtype, np.float): + if isinstance(array.dtype, np.float64): data = array[slices] else: # Cast non-float data type. 
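The dtype changes in this and the surrounding diffs follow the NumPy >= 1.20 deprecation
of the builtin-type aliases (``np.float``, ``np.int``, ``np.bool``, ``np.str``). A minimal
sketch of the before/after pattern, not part of the patch itself::

    import numpy as np

    # Deprecated aliases - each use emits a DeprecationWarning on NumPy >= 1.20:
    #     np.zeros(3, dtype=np.float)
    #     np.zeros(3, dtype=np.bool)
    # Preferred explicit replacements, as applied throughout these diffs:
    floats = np.zeros(3, dtype=np.float64)  # or simply dtype=float
    flags = np.zeros(3, dtype=np.bool_)     # or simply dtype=bool
    print(floats.dtype, flags.dtype)        # float64 bool
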
diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 7063fdef43..b0341fafaa 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -696,7 +696,7 @@ def _regrid( if ma.isMaskedArray(src_data): data = ma.empty(shape, dtype=dtype) - data.mask = np.zeros(data.shape, dtype=np.bool) + data.mask = np.zeros(data.shape, dtype=np.bool_) else: data = np.empty(shape, dtype=dtype) diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index c27b488b3b..40dccf9428 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -37,6 +37,7 @@ "CoordMetadata", "CubeMetadata", "DimCoordMetadata", + "hexdigest", "metadata_manager_factory", ] @@ -48,34 +49,46 @@ logger = get_logger(__name__, fmt="[%(cls)s.%(funcName)s]") -def _hexdigest(value): +def hexdigest(item): """ - Return a hexidecimal string hash representation of the provided value. + Calculate a hexidecimal string hash representation of the provided item. - Calculates a 64-bit non-cryptographic hash of the provided value, - and returns the hexdigest string representation of the calculated hash. + Calculates a 64-bit non-cryptographic hash of the provided item, using + the extremely fast ``xxhash`` hashing algorithm, and returns the hexdigest + string representation of the hash. + + This provides a means to compare large and/or complex objects through + simple string hexdigest comparison. + + Args: + + * item (object): + The item that requires to have its hexdigest calculated. + + Returns: + The string hexidecimal representation of the item's 64-bit hash. """ # Special case: deal with numpy arrays. - if ma.isMaskedArray(value): + if ma.isMaskedArray(item): parts = ( - value.shape, - xxh64_hexdigest(value.data), - xxh64_hexdigest(value.mask), + item.shape, + xxh64_hexdigest(item.data), + xxh64_hexdigest(item.mask), ) - value = str(parts) - elif isinstance(value, np.ndarray): - parts = (value.shape, xxh64_hexdigest(value)) - value = str(parts) + item = str(parts) + elif isinstance(item, np.ndarray): + parts = (item.shape, xxh64_hexdigest(item)) + item = str(parts) try: # Calculate single-shot hash to avoid allocating state on the heap - result = xxh64_hexdigest(value) + result = xxh64_hexdigest(item) except TypeError: # xxhash expects a bytes-like object, so try hashing the - # string representation of the provided value instead, but + # string representation of the provided item instead, but # also fold in the object type... - parts = (type(value), value) + parts = (type(item), item) result = xxh64_hexdigest(str(parts)) return result @@ -348,8 +361,8 @@ def _combine_lenient_attributes(left, right): # Use xxhash to perform an extremely fast non-cryptographic hash of # each dictionary key rvalue, thus ensuring that the dictionary is # completely hashable, as required by a set. - sleft = {(k, _hexdigest(v)) for k, v in left.items()} - sright = {(k, _hexdigest(v)) for k, v in right.items()} + sleft = {(k, hexdigest(v)) for k, v in left.items()} + sright = {(k, hexdigest(v)) for k, v in right.items()} # Intersection of common items. common = sleft & sright # Items in sleft different from sright. @@ -377,8 +390,8 @@ def _combine_strict_attributes(left, right): # Use xxhash to perform an extremely fast non-cryptographic hash of # each dictionary key rvalue, thus ensuring that the dictionary is # completely hashable, as required by a set. 
- sleft = {(k, _hexdigest(v)) for k, v in left.items()} - sright = {(k, _hexdigest(v)) for k, v in right.items()} + sleft = {(k, hexdigest(v)) for k, v in left.items()} + sright = {(k, hexdigest(v)) for k, v in right.items()} # Intersection of common items. common = sleft & sright # Now bring the result together. @@ -436,8 +449,8 @@ def _compare_lenient_attributes(left, right): # Use xxhash to perform an extremely fast non-cryptographic hash of # each dictionary key rvalue, thus ensuring that the dictionary is # completely hashable, as required by a set. - sleft = {(k, _hexdigest(v)) for k, v in left.items()} - sright = {(k, _hexdigest(v)) for k, v in right.items()} + sleft = {(k, hexdigest(v)) for k, v in left.items()} + sright = {(k, hexdigest(v)) for k, v in right.items()} # Items in sleft different from sright. dsleft = dict(sleft - sright) # Items in sright different from sleft. @@ -453,8 +466,8 @@ def _compare_strict_attributes(left, right): # Use xxhash to perform an extremely fast non-cryptographic hash of # each dictionary key rvalue, thus ensuring that the dictionary is # completely hashable, as required by a set. - sleft = {(k, _hexdigest(v)) for k, v in left.items()} - sright = {(k, _hexdigest(v)) for k, v in right.items()} + sleft = {(k, hexdigest(v)) for k, v in left.items()} + sright = {(k, hexdigest(v)) for k, v in right.items()} return sleft == sright @@ -522,8 +535,8 @@ def _difference_lenient_attributes(left, right): # Use xxhash to perform an extremely fast non-cryptographic hash of # each dictionary key rvalue, thus ensuring that the dictionary is # completely hashable, as required by a set. - sleft = {(k, _hexdigest(v)) for k, v in left.items()} - sright = {(k, _hexdigest(v)) for k, v in right.items()} + sleft = {(k, hexdigest(v)) for k, v in left.items()} + sright = {(k, hexdigest(v)) for k, v in right.items()} # Items in sleft different from sright. dsleft = dict(sleft - sright) # Items in sright different from sleft. @@ -550,8 +563,8 @@ def _difference_strict_attributes(left, right): # Use xxhash to perform an extremely fast non-cryptographic hash of # each dictionary key rvalue, thus ensuring that the dictionary is # completely hashable, as required by a set. - sleft = {(k, _hexdigest(v)) for k, v in left.items()} - sright = {(k, _hexdigest(v)) for k, v in right.items()} + sleft = {(k, hexdigest(v)) for k, v in left.items()} + sright = {(k, hexdigest(v)) for k, v in right.items()} # Items in sleft different from sright. dsleft = dict(sleft - sright) # Items in sright different from sleft. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 5578507d28..a15951900b 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -2652,9 +2652,6 @@ def vector_summary( def __str__(self): return self.summary() - def __unicode__(self): - return self.summary() - def __repr__(self): return "" % self.summary( shorten=True, name_padding=1 diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index e08b71c403..7608b2608a 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -498,9 +498,9 @@ def _regrid_area_weighted_array(src_data, x_dim, y_dim, weights_info, mdtol=0): # Flag to indicate whether the original data was a masked array. 
src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False if src_masked: - src_area_masks = np.full(src_areas_shape, True, dtype=np.bool) + src_area_masks = np.full(src_areas_shape, True, dtype=np.bool_) else: - new_data_mask = np.full(new_shape, False, dtype=np.bool) + new_data_mask = np.full(new_shape, False, dtype=np.bool_) # Axes of data over which the weighted mean is calculated. axis = (y_dim, x_dim) diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 1b1b2377ff..a198bb5740 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -816,7 +816,7 @@ def __iter__(self): return pp._interpret_fields(self._extract_field()) -def _parse_binary_stream(file_like, dtype=np.float, count=-1): +def _parse_binary_stream(file_like, dtype=np.float64, count=-1): """ Replacement :func:`numpy.fromfile` due to python3 performance issues. diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 406da925b1..0627bcc4a2 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -752,7 +752,7 @@ def _data_bytes_to_shaped_array( # However, we still mask any MDI values in the array (below). pass else: - land_mask = mask.data.astype(np.bool) + land_mask = mask.data.astype(np.bool_) sea_mask = ~land_mask new_data = np.ma.masked_all(land_mask.shape) new_data.fill_value = mdi diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 53d9f4dc35..ab99b5c7f8 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -580,7 +580,7 @@ def _epoch_date_hours(epoch_hours_unit, datetime): # numpy.float64. The behaviour of round is to recast this to an # int, which is not the desired behaviour for PP files. # So, cast the answer to numpy.float_ to be safe. - epoch_hours = np.float_(epoch_hours_unit.date2num(datetime)) + epoch_hours = np.float64(epoch_hours_unit.date2num(datetime)) if days_offset is not None: # Correct for any modifications to achieve a valid date. 
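The ``metadata.py`` changes earlier in this commit make ``hexdigest`` part of the public
API (see the whatsnew entry above). A hypothetical usage sketch, not part of the patch::

    import numpy as np
    from iris.common.metadata import hexdigest

    # hexdigest returns the hex string of a fast 64-bit xxhash of any object,
    # so large or complex objects can be compared via simple string equality.
    a = np.arange(10, dtype=np.float64)
    b = np.arange(10, dtype=np.float64)
    c = np.arange(10, dtype=np.int64)

    print(hexdigest(a) == hexdigest(b))  # True: same shape, dtype and values
    print(hexdigest(a) == hexdigest(c))  # False: different dtype
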
diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 4a85e5cdb2..b2eebc4f03 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -483,26 +483,26 @@ def assertDataAlmostEqual(self, data, reference_filename, **kwargs): stats.get("max", 0.0), stats.get("min", 0.0), ), - dtype=np.float_, + dtype=np.float64, ) if math.isnan(stats.get("mean", 0.0)): self.assertTrue(math.isnan(data.mean())) else: data_stats = np.array( (data.mean(), data.std(), data.max(), data.min()), - dtype=np.float_, + dtype=np.float64, ) self.assertArrayAllClose(nstats, data_stats, **kwargs) else: self._ensure_folder(reference_path) stats = collections.OrderedDict( [ - ("std", np.float_(data.std())), - ("min", np.float_(data.min())), - ("max", np.float_(data.max())), + ("std", np.float64(data.std())), + ("min", np.float64(data.min())), + ("max", np.float64(data.max())), ("shape", data.shape), ("masked", ma.is_masked(data)), - ("mean", np.float_(data.mean())), + ("mean", np.float64(data.mean())), ] ) with open(reference_path, "w") as reference_file: diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py index 5dc258dfb4..fa2da8e60c 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -675,7 +675,7 @@ def test_rotated(self): [100, 100, 100, 100, 100, 100, 100, 100, 100], [100, 100, 100, 100, 100, 100, 100, 100, 100], ], - dtype=np.float, + dtype=np.float64, ) c1_areasum = _cube_area_sum(c1) @@ -715,7 +715,7 @@ def test_rotated(self): [100, 100, 199, 199, 100], [100, 100, 199, 199, 199], ], - dtype=np.float, + dtype=np.float64, ) c2_areasum = _cube_area_sum(c2) @@ -770,7 +770,7 @@ def test_missing_data_rotated(self): [100, 100, 100, 100, 100, 100, 100, 100, 100], [100, 100, 100, 100, 100, 100, 100, 100, 100], ], - dtype=np.float, + dtype=np.float64, ) if do_add_missing: diff --git a/lib/iris/tests/test_cell.py b/lib/iris/tests/test_cell.py index d6c6ace808..4690cedfa6 100644 --- a/lib/iris/tests/test_cell.py +++ b/lib/iris/tests/test_cell.py @@ -130,7 +130,7 @@ class Terry: self.assertEqual(self.d.__ne__(Terry()), NotImplemented) def test_numpy_int_equality(self): - dtypes = (np.int, np.int16, np.int32, np.int64) + dtypes = (np.int_, np.int16, np.int32, np.int64) for dtype in dtypes: val = dtype(3) cell = iris.coords.Cell(val, None) @@ -138,7 +138,7 @@ def test_numpy_int_equality(self): def test_numpy_float_equality(self): dtypes = ( - np.float, + np.float_, np.float16, np.float32, np.float64, diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index d45a884a2f..1f6c5c3843 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -415,7 +415,7 @@ def test_concat_masked_2x2d(self): self.assertEqual(result[0].shape, (2, 4)) mask = np.array( [[True, False, False, True], [False, True, True, False]], - dtype=np.bool, + dtype=np.bool_, ) self.assertArrayEqual(result[0].data.mask, mask) @@ -436,7 +436,7 @@ def test_concat_masked_2y2d(self): self.assertEqual(result[0].shape, (4, 2)) mask = np.array( [[True, False], [False, True], [False, True], [True, False]], - dtype=np.bool, + dtype=np.bool_, ) self.assertArrayEqual(result[0].data.mask, mask) @@ -458,7 +458,7 @@ def test_concat_masked_2y2d_with_concrete_and_lazy(self): self.assertEqual(result[0].shape, (4, 2)) mask = np.array( [[True, False], [False, True], 
[False, True], [True, False]], - dtype=np.bool, + dtype=np.bool_, ) self.assertArrayEqual(result[0].data.mask, mask) @@ -480,7 +480,7 @@ def test_concat_masked_2y2d_with_lazy_and_concrete(self): self.assertEqual(result[0].shape, (4, 2)) mask = np.array( [[True, False], [False, True], [False, True], [True, False]], - dtype=np.bool, + dtype=np.bool_, ) self.assertArrayEqual(result[0].data.mask, mask) diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index e404216143..185beb1bae 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -468,7 +468,7 @@ def _make_cube(self, a, b, c, d, data=0): ) for name, value in zip(["a", "b", "c", "d"], [a, b, c, d]): - dtype = np.str if isinstance(value, str) else np.float32 + dtype = np.str_ if isinstance(value, str) else np.float32 cube.add_aux_coord( AuxCoord( np.array([value], dtype=dtype), long_name=name, units="1" @@ -613,7 +613,7 @@ def _make_cube(self, a, b, data=0, a_dim=False, b_dim=False): ) for name, value, dim in zip(["a", "b"], [a, b], [a_dim, b_dim]): - dtype = np.str if isinstance(value, str) else np.float32 + dtype = np.str_ if isinstance(value, str) else np.float32 ctype = DimCoord if dim else AuxCoord coord = ctype( np.array([value], dtype=dtype), long_name=name, units="1" diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index 8ac86da7ec..0741a24926 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -431,7 +431,7 @@ def test_rotated_to_osgb(self): [1, 1, 1, 1, 1, 0, 0, 1, 1, 1], [1, 1, 1, 1, 1, 0, 0, 1, 1, 1], ], - np.bool, + np.bool_, ) self.assertArrayEqual(expected_mask, ut.data.mask) self.assertArrayEqual(expected_mask, vt.data.mask) diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index f1e385731a..5c7b5ea7d4 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -266,7 +266,7 @@ def _regrid(self, data, method, extrapolation_mode=None): def test_default_ndarray(self): # NaN -> NaN # Extrapolated -> NaN - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: result = self._regrid(data, method) @@ -278,7 +278,7 @@ def test_default_maskedarray(self): # NaN -> NaN # Extrapolated -> Masked # Masked -> Masked - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan data[2, 3] = ma.masked for method in self.methods: @@ -293,7 +293,7 @@ def test_default_maskedarray_none_masked(self): # NaN -> NaN # Extrapolated -> Masked # Masked -> N/A - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: result = self._regrid(data, method) @@ -307,7 +307,7 @@ def test_default_maskedarray_none_masked_expanded(self): # NaN -> NaN # Extrapolated -> Masked # Masked -> N/A - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) # Make sure the mask has been expanded data.mask = False data[0, 0] = np.nan @@ -322,7 +322,7 @@ def test_default_maskedarray_none_masked_expanded(self): def test_method_ndarray(self): # NaN -> NaN # 
Extrapolated -> linear - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: result = self._regrid(data, method, "extrapolate") @@ -334,7 +334,7 @@ def test_method_maskedarray(self): # NaN -> NaN # Extrapolated -> linear # Masked -> Masked - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan data[2, 3] = ma.masked for method in self.methods: @@ -348,7 +348,7 @@ def test_method_maskedarray(self): def test_nan_ndarray(self): # NaN -> NaN # Extrapolated -> NaN - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: result = self._regrid(data, method, "nan") @@ -360,7 +360,7 @@ def test_nan_maskedarray(self): # NaN -> NaN # Extrapolated -> NaN # Masked -> Masked - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan data[2, 3] = ma.masked for method in self.methods: @@ -373,7 +373,7 @@ def test_nan_maskedarray(self): def test_error_ndarray(self): # Values irrelevant - the function raises an error. - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: with self.assertRaisesRegex(ValueError, "out of bounds"): @@ -381,7 +381,7 @@ def test_error_ndarray(self): def test_error_maskedarray(self): # Values irrelevant - the function raises an error. - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan data[2, 3] = ma.masked for method in self.methods: @@ -392,7 +392,7 @@ def test_mask_ndarray(self): # NaN -> NaN # Extrapolated -> Masked (this is different from all the other # modes) - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: result = self._regrid(data, method, "mask") @@ -406,7 +406,7 @@ def test_mask_maskedarray(self): # NaN -> NaN # Extrapolated -> Masked # Masked -> Masked - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan data[2, 3] = ma.masked for method in self.methods: @@ -420,7 +420,7 @@ def test_mask_maskedarray(self): def test_nanmask_ndarray(self): # NaN -> NaN # Extrapolated -> NaN - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan for method in self.methods: result = self._regrid(data, method, "nanmask") @@ -432,7 +432,7 @@ def test_nanmask_maskedarray(self): # NaN -> NaN # Extrapolated -> Masked # Masked -> Masked - data = ma.arange(12, dtype=np.float).reshape(3, 4) + data = ma.arange(12, dtype=np.float64).reshape(3, 4) data[0, 0] = np.nan data[2, 3] = ma.masked for method in self.methods: @@ -444,7 +444,7 @@ def test_nanmask_maskedarray(self): self.assertMaskedArrayEqual(result, expected) def test_invalid(self): - data = np.arange(12, dtype=np.float).reshape(3, 4) + data = np.arange(12, dtype=np.float64).reshape(3, 4) emsg = "Invalid extrapolation mode" for method in self.methods: with self.assertRaisesRegex(ValueError, emsg): diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index 6e8e40cd1b..a5cb5480a0 100644 --- 
a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -195,12 +195,12 @@ def setUp(self): np.arange(-0.975, 0, 0.05, dtype=float), units="1", long_name="s" ) self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float).reshape(2, 2), + np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", units="m", ) self.depth = AuxCoord( - np.arange(4, dtype=np.float).reshape(2, 2) * 1e3, + np.arange(4, dtype=np.float64).reshape(2, 2) * 1e3, long_name="depth", units="m", ) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 238df2f073..321a013ef8 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -179,12 +179,12 @@ def setUp(self): np.linspace(-0.959, -0.001, 36), units="1", long_name="c" ) self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float).reshape(2, 2), + np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", units="m", ) self.depth = AuxCoord( - np.array([[5, 200], [1000, 4000]], dtype=np.float), + np.array([[5, 200], [1000, 4000]], dtype=np.float64), long_name="depth", units="m", ) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index fb3ada382e..d16285cd5d 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -179,12 +179,12 @@ def setUp(self): np.linspace(-0.959, -0.001, 36), units="1", long_name="c" ) self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float).reshape(2, 2), + np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", units="m", ) self.depth = AuxCoord( - np.array([[5, 200], [1000, 4000]], dtype=np.float), + np.array([[5, 200], [1000, 4000]], dtype=np.float64), long_name="depth", units="m", ) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index 69a8a32c6e..d4c0a33fdf 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -102,12 +102,12 @@ def setUp(self): np.linspace(-0.05, -1, 5), long_name="sigma", units="1" ) self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float).reshape(2, 2), + np.arange(-1, 3, dtype=np.float64).reshape(2, 2), long_name="eta", units="m", ) self.depth = AuxCoord( - np.arange(4, dtype=np.float).reshape(2, 2) * 1e3, + np.arange(4, dtype=np.float64).reshape(2, 2) * 1e3, long_name="depth", units="m", ) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index 4a4e30b9ca..b8a574258d 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -199,22 +199,22 @@ def derive(sigma, eta, depth, depth_c, nsigma, zlev, coord=True): def setUp(self): self.sigma = DimCoord( - np.arange(5, dtype=np.float) * 10, long_name="sigma", units="1" + np.arange(5, dtype=np.float64) * 10, long_name="sigma", units="1" ) self.eta = AuxCoord( - np.arange(4, dtype=np.float).reshape(2, 2), + np.arange(4, dtype=np.float64).reshape(2, 2), long_name="eta", units="m", ) self.depth = AuxCoord( - np.arange(4, dtype=np.float).reshape(2, 2) * 10, + np.arange(4, dtype=np.float64).reshape(2, 2) * 10, long_name="depth", units="m", ) self.depth_c = AuxCoord([15], 
long_name="depth_c", units="m") self.nsigma = AuxCoord([3], long_name="nsigma") self.zlev = DimCoord( - np.arange(5, dtype=np.float) * 10, long_name="zlev", units="m" + np.arange(5, dtype=np.float64) * 10, long_name="zlev", units="m" ) self.kwargs = dict( sigma=self.sigma, diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index 71fabe6c73..4ff8ec0de6 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -1074,8 +1074,8 @@ def setUp(self): self.values = OrderedDict( one=sentinel.one, two=sentinel.two, - three=np.float(3.14), - four=np.arange(10, dtype=np.float), + three=np.float64(3.14), + four=np.arange(10, dtype=np.float64), five=ma.arange(10, dtype=np.int16), ) self.cls = BaseMetadata diff --git a/lib/iris/tests/unit/common/metadata/test__hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py similarity index 90% rename from lib/iris/tests/unit/common/metadata/test__hexdigest.py rename to lib/iris/tests/unit/common/metadata/test_hexdigest.py index 798f71bcd0..55c697ea6d 100644 --- a/lib/iris/tests/unit/common/metadata/test__hexdigest.py +++ b/lib/iris/tests/unit/common/metadata/test_hexdigest.py @@ -4,7 +4,7 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the :func:`iris.common.metadata._hexdigest`. +Unit tests for the :func:`iris.common.metadata.hexdigest`. """ @@ -18,7 +18,7 @@ import numpy as np from xxhash import xxh64, xxh64_hexdigest -from iris.common.metadata import _hexdigest as hexdigest +from iris.common.metadata import hexdigest class TestBytesLikeObject(tests.IrisTest): @@ -49,18 +49,18 @@ def test_string(self): self.assertEqual(expected, hexdigest(value)) def test_numpy_array_int(self): - value = np.arange(10, dtype=np.int) + value = np.arange(10, dtype=np.int_) expected = self._ndarray(value) self.assertEqual(expected, hexdigest(value)) def test_numpy_array_float(self): - value = np.arange(10, dtype=np.float) + value = np.arange(10, dtype=np.float64) expected = self._ndarray(value) self.assertEqual(expected, hexdigest(value)) def test_numpy_array_float_not_int(self): - ivalue = np.arange(10, dtype=np.int) - fvalue = np.arange(10, dtype=np.float) + ivalue = np.arange(10, dtype=np.int_) + fvalue = np.arange(10, dtype=np.float64) expected = self._ndarray(ivalue) self.assertNotEqual(expected, hexdigest(fvalue)) @@ -75,7 +75,7 @@ def test_numpy_array_reshape_not_flat(self): self.assertNotEqual(expected, hexdigest(value.flatten())) def test_masked_array_int(self): - value = ma.arange(10, dtype=np.int) + value = ma.arange(10, dtype=np.int_) expected = self._masked(value) self.assertEqual(expected, hexdigest(value)) @@ -85,7 +85,7 @@ def test_masked_array_int(self): self.assertEqual(expected, hexdigest(value)) def test_masked_array_float(self): - value = ma.arange(10, dtype=np.float) + value = ma.arange(10, dtype=np.float64) expected = self._masked(value) self.assertEqual(expected, hexdigest(value)) @@ -95,8 +95,8 @@ def test_masked_array_float(self): self.assertEqual(expected, hexdigest(value)) def test_masked_array_float_not_int(self): - ivalue = ma.arange(10, dtype=np.int) - fvalue = ma.arange(10, dtype=np.float) + ivalue = ma.arange(10, dtype=np.int_) + fvalue = ma.arange(10, dtype=np.float64) expected = self._masked(ivalue) self.assertNotEqual(expected, hexdigest(fvalue)) @@ -127,7 +127,7 @@ def test_int(self): self.assertEqual(expected, 
hexdigest(value)) def test_numpy_int(self): - value = np.int(123) + value = int(123) expected = self._expected(value) self.assertEqual(expected, hexdigest(value)) @@ -137,7 +137,7 @@ def test_float(self): self.assertEqual(expected, hexdigest(value)) def test_numpy_float(self): - value = np.float(123.4) + value = float(123.4) expected = self._expected(value) self.assertEqual(expected, hexdigest(value)) diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py index f61b489c2b..6b8064faca 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py @@ -37,7 +37,7 @@ def setUp(self): # Source cube. self.test_src_name = "air_temperature" self.test_src_units = "K" - self.test_src_data = ma.arange(1, 13, dtype=np.float).reshape(3, 4) + self.test_src_data = ma.arange(1, 13, dtype=np.float64).reshape(3, 4) self.test_src_attributes = dict(wibble="wobble") self.test_scalar_coord = iris.coords.DimCoord( [1], long_name="test_scalar_coord" @@ -135,7 +135,7 @@ def setUp(self): ) def _weighted_mean(self, points): - points = np.asarray(points, dtype=np.float) + points = np.asarray(points, dtype=np.float64) weights = points * self.weight_factor numerator = denominator = 0 for point, weight in zip(points, weights): diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 7d4bf232fc..26296daccf 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -59,7 +59,7 @@ def netcdf_variable( class Test_translate__global_attributes(tests.IrisTest): def setUp(self): - ncvar = netcdf_variable("ncvar", "height", np.float) + ncvar = netcdf_variable("ncvar", "height", np.float64) ncattrs = mock.Mock(return_value=["dimensions"]) getncattr = mock.Mock(return_value="something something_else") self.dataset = mock.Mock( @@ -80,24 +80,24 @@ def test_create_global_attributes(self): class Test_translate__formula_terms(tests.IrisTest): def setUp(self): self.delta = netcdf_variable( - "delta", "height", np.float, bounds="delta_bnds" + "delta", "height", np.float64, bounds="delta_bnds" ) self.delta_bnds = netcdf_variable( "delta_bnds", "height bnds", np.float ) self.sigma = netcdf_variable( - "sigma", "height", np.float, bounds="sigma_bnds" + "sigma", "height", np.float64, bounds="sigma_bnds" ) self.sigma_bnds = netcdf_variable( "sigma_bnds", "height bnds", np.float ) - self.orography = netcdf_variable("orography", "lat lon", np.float) + self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" standard_name = "atmosphere_hybrid_height_coordinate" self.height = netcdf_variable( "height", "height", - np.float, + np.float64, formula_terms=formula_terms, bounds="height_bnds", standard_name=standard_name, @@ -106,13 +106,16 @@ def setUp(self): # which will be ignored by the cf loader. 
formula_terms = "a: delta_bnds b: sigma_bnds orog: orography" self.height_bnds = netcdf_variable( - "height_bnds", "height bnds", np.float, formula_terms=formula_terms + "height_bnds", + "height bnds", + np.float64, + formula_terms=formula_terms, ) - self.lat = netcdf_variable("lat", "lat", np.float) - self.lon = netcdf_variable("lon", "lon", np.float) + self.lat = netcdf_variable("lat", "lat", np.float64) + self.lon = netcdf_variable("lon", "lon", np.float64) # Note that, only lat and lon are explicitly associated as coordinates. self.temp = netcdf_variable( - "temp", "height lat lon", np.float, coordinates="lat lon" + "temp", "height lat lon", np.float64, coordinates="lat lon" ) self.variables = dict( @@ -179,24 +182,24 @@ def test_create_formula_terms(self): class Test_build_cf_groups__formula_terms(tests.IrisTest): def setUp(self): self.delta = netcdf_variable( - "delta", "height", np.float, bounds="delta_bnds" + "delta", "height", np.float64, bounds="delta_bnds" ) self.delta_bnds = netcdf_variable( "delta_bnds", "height bnds", np.float ) self.sigma = netcdf_variable( - "sigma", "height", np.float, bounds="sigma_bnds" + "sigma", "height", np.float64, bounds="sigma_bnds" ) self.sigma_bnds = netcdf_variable( "sigma_bnds", "height bnds", np.float ) - self.orography = netcdf_variable("orography", "lat lon", np.float) + self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" standard_name = "atmosphere_hybrid_height_coordinate" self.height = netcdf_variable( "height", "height", - np.float, + np.float64, formula_terms=formula_terms, bounds="height_bnds", standard_name=standard_name, @@ -205,15 +208,18 @@ def setUp(self): # which will be ignored by the cf loader. formula_terms = "a: delta_bnds b: sigma_bnds orog: orography" self.height_bnds = netcdf_variable( - "height_bnds", "height bnds", np.float, formula_terms=formula_terms + "height_bnds", + "height bnds", + np.float64, + formula_terms=formula_terms, ) - self.lat = netcdf_variable("lat", "lat", np.float) - self.lon = netcdf_variable("lon", "lon", np.float) - self.x = netcdf_variable("x", "lat lon", np.float) - self.y = netcdf_variable("y", "lat lon", np.float) + self.lat = netcdf_variable("lat", "lat", np.float64) + self.lon = netcdf_variable("lon", "lon", np.float64) + self.x = netcdf_variable("x", "lat lon", np.float64) + self.y = netcdf_variable("y", "lat lon", np.float64) # Note that, only lat and lon are explicitly associated as coordinates. 
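The dtype changes running through these test diffs all address the same deprecation: NumPy 1.20 deprecated the aliases that merely shadowed Python builtins (np.float, np.int, np.bool, np.str, ...). A minimal sketch of the replacements being applied, assuming NumPy >= 1.20 is installed (array values here are illustrative only):

    import numpy as np

    data = np.arange(12, dtype=np.float64).reshape(3, 4)  # was dtype=np.float
    mask = np.zeros((3, 4), dtype=np.bool_)               # was dtype=np.bool
    counts = np.arange(10, dtype=np.int_)                 # was dtype=np.int
    label = np.array(["a"], dtype=np.str_)                # was dtype=np.str
    scalar = float(123.4)                                 # was np.float(123.4)
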
self.temp = netcdf_variable( - "temp", "height lat lon", np.float, coordinates="x y" + "temp", "height lat lon", np.float64, coordinates="x y" ) self.variables = dict( @@ -332,7 +338,7 @@ def test_auxiliary_ignore(self): self.assertEqual(warn.call_count, 1) def test_promoted_auxiliary_ignore(self): - self.wibble = netcdf_variable("wibble", "lat wibble", np.float) + self.wibble = netcdf_variable("wibble", "lat wibble", np.float64) self.variables["wibble"] = self.wibble self.orography.coordinates = "wibble" with mock.patch( diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index 7c3ef33182..20a431994c 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -199,8 +199,8 @@ def test_odd_bplon_rotated(self): class Test__init__(tests.IrisTest): def setUp(self): - header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int) - header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float) + header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) + header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) self.header = list(header_longs) + list(header_floats) def test_no_headers(self): @@ -232,8 +232,8 @@ def test_raw_lbpack(self): class Test__getattr__(tests.IrisTest): def setUp(self): - header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int) - header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float) + header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) + header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) self.header = list(header_longs) + list(header_floats) def test_attr_singular_long(self): diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index b6b9cb3263..8bf9a0435b 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -32,7 +32,7 @@ def setUp(self): decompressed = np.arange(data_len).reshape(*self.data_shape) decompressed *= np.arange(self.data_shape[1]) % 3 + 1 - decompressed_mask = np.zeros(self.data_shape, np.bool) + decompressed_mask = np.zeros(self.data_shape, np.bool_) decompressed_mask[ y_halo + rim : -(y_halo + rim), x_halo + rim : -(x_halo + rim) ] = True @@ -81,7 +81,7 @@ def setUp(self): self.land = np.array( [[0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float64 ) - sea = ~self.land.astype(np.bool) + sea = ~self.land.astype(np.bool_) self.land_masked_data = np.array([1, 3, 4.5]) self.sea_masked_data = np.array([1, 3, 4.5, -4, 5, 0, 1, 2, 3]) diff --git a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py index c8aebc34e4..f02b86cf33 100644 --- a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py +++ b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py @@ -44,7 +44,7 @@ def test_dtype_change(self): lazy_array = as_lazy_data(concrete_array) wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) self.assertTrue(is_lazy_data(wrapped)) - self.assertEqual(wrapped.dtype, np.int) + self.assertEqual(wrapped.dtype, np.int_) self.assertEqual(wrapped.compute().dtype, wrapped.dtype) From 206cdcc8850a70213c3d4b52c24c4f3a608dbb3f Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 22 Feb 2021 21:45:42 +0000 Subject: [PATCH 08/22] Connectivity manager (#4017) * ConnectivityManager 
first pass. * ConnectivityManager align with proposed CoordManager. * Connectivity Manager review actions. * Connectivity Manager more review changes. * Use metadata_manager for Mesh location dimension. * Mesh dimension name abstraction. * Align Cooord and Connectivity Managers filters methods. * Completed Mesh class. * filter_cf improvements. * Moved filter_cf. * Mesh connectivity manager namedtuples comment. * Mesh removed trailing underscores. * Mesh _set_dimension_names improvements. * Mesh import rationalisation. * Mesh connectivity manager remove NDIM. * Connectivity manager use lazy indices_by_src(). * Connectivity manager clearer removal syntax. * Connectivity manager don't override __init__. * Connectivity manager correct base class syntax. * Metadata filter hexdigest reference fix. * test_MeshMetadata fix. * Rename filter to metadata_filter. --- docs/src/userguide/cube_statistics.rst | 76 +- lib/iris/common/metadata.py | 133 +++ lib/iris/cube.py | 91 +- lib/iris/exceptions.py | 6 + lib/iris/experimental/ugrid.py | 798 +++++++++++------- .../common/metadata/test_metadata_filter.py | 118 +++ .../experimental/ugrid/test_MeshMetadata.py | 4 +- 7 files changed, 808 insertions(+), 418 deletions(-) create mode 100644 lib/iris/tests/unit/common/metadata/test_metadata_filter.py diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst index 4eb016078e..d62a056f33 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -23,9 +23,9 @@ Collapsing Entire Data Dimensions In the :doc:`subsetting_a_cube` section we saw how to extract a subset of a cube in order to reduce either its dimensionality or its resolution. -Instead of simply extracting a sub-region of the data, -we can produce statistical functions of the data values -across a particular dimension, +Instead of simply extracting a sub-region of the data, +we can produce statistical functions of the data values +across a particular dimension, such as a 'mean over time' or 'minimum over latitude'. .. _cube-statistics_forecast_printout: @@ -57,9 +57,9 @@ For instance, suppose we have a cube: um_version: 7.3 -In this case we have a 4 dimensional cube; -to mean the vertical (z) dimension down to a single valued extent -we can pass the coordinate name and the aggregation definition to the +In this case we have a 4 dimensional cube; +to mean the vertical (z) dimension down to a single valued extent +we can pass the coordinate name and the aggregation definition to the :meth:`Cube.collapsed() ` method: >>> import iris.analysis @@ -88,8 +88,8 @@ we can pass the coordinate name and the aggregation definition to the mean: model_level_number -Similarly other analysis operators such as ``MAX``, ``MIN`` and ``STD_DEV`` -can be used instead of ``MEAN``, see :mod:`iris.analysis` for a full list +Similarly other analysis operators such as ``MAX``, ``MIN`` and ``STD_DEV`` +can be used instead of ``MEAN``, see :mod:`iris.analysis` for a full list of currently supported operators. For an example of using this functionality, the @@ -103,14 +103,14 @@ in the gallery takes a zonal mean of an ``XYT`` cube by using the Area Averaging ^^^^^^^^^^^^^^ -Some operators support additional keywords to the ``cube.collapsed`` method. -For example, :func:`iris.analysis.MEAN ` supports -a weights keyword which can be combined with +Some operators support additional keywords to the ``cube.collapsed`` method. 
+For example, :func:`iris.analysis.MEAN ` supports +a weights keyword which can be combined with :func:`iris.analysis.cartography.area_weights` to calculate an area average. -Let's use the same data as was loaded in the previous example. -Since ``grid_latitude`` and ``grid_longitude`` were both point coordinates -we must guess bound positions for them +Let's use the same data as was loaded in the previous example. +Since ``grid_latitude`` and ``grid_longitude`` were both point coordinates +we must guess bound positions for them in order to calculate the area of the grid boxes:: import iris.analysis.cartography @@ -155,24 +155,24 @@ including an example on taking a :ref:`global area-weighted mean Partially Reducing Data Dimensions ---------------------------------- -Instead of completely collapsing a dimension, other methods can be applied -to reduce or filter the number of data points of a particular dimension. +Instead of completely collapsing a dimension, other methods can be applied +to reduce or filter the number of data points of a particular dimension. Aggregation of Grouped Data ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The :meth:`Cube.aggregated_by ` operation -combines data for all points with the same value of a given coordinate. -To do this, you need a coordinate whose points take on only a limited set -of different values -- the *number* of these then determines the size of the +The :meth:`Cube.aggregated_by ` operation +combines data for all points with the same value of a given coordinate. +To do this, you need a coordinate whose points take on only a limited set +of different values -- the *number* of these then determines the size of the reduced dimension. -The :mod:`iris.coord_categorisation` module can be used to make such -'categorical' coordinates out of ordinary ones: The most common use is -to aggregate data over regular *time intervals*, +The :mod:`iris.coord_categorisation` module can be used to make such +'categorical' coordinates out of ordinary ones: The most common use is +to aggregate data over regular *time intervals*, such as by calendar month or day of the week. -For example, let's create two new coordinates on the cube +For example, let's create two new coordinates on the cube to represent the climatological seasons and the season year respectively:: import iris @@ -188,8 +188,8 @@ to represent the climatological seasons and the season year respectively:: .. note:: - The 'season year' is not the same as year number, because (e.g.) the months - Dec11, Jan12 + Feb12 all belong to 'DJF-12'. + The 'season year' is not the same as year number, because (e.g.) the months + Dec11, Jan12 + Feb12 all belong to 'DJF-12'. See :meth:`iris.coord_categorisation.add_season_year`. @@ -206,10 +206,10 @@ to represent the climatological seasons and the season year respectively:: iris.coord_categorisation.add_season_year(cube, 'time', name='season_year') annual_seasonal_mean = cube.aggregated_by( - ['clim_season', 'season_year'], + ['clim_season', 'season_year'], iris.analysis.MEAN) - + Printing this cube now shows that two extra coordinates exist on the cube: .. doctest:: aggregation @@ -238,20 +238,20 @@ These two coordinates can now be used to aggregate by season and climate-year: .. doctest:: aggregation >>> annual_seasonal_mean = cube.aggregated_by( - ... ['clim_season', 'season_year'], + ... ['clim_season', 'season_year'], ... 
iris.analysis.MEAN) >>> print(repr(annual_seasonal_mean)) - -The primary change in the cube is that the cube's data has been -reduced in the 'time' dimension by aggregation (taking means, in this case). -This has collected together all data points with the same values of season and + +The primary change in the cube is that the cube's data has been +reduced in the 'time' dimension by aggregation (taking means, in this case). +This has collected together all data points with the same values of season and season-year. The results are now indexed by the 19 different possible values of season and season-year in a new, reduced 'time' dimension. -We can see this by printing the first 10 values of season+year -from the original cube: These points are individual months, +We can see this by printing the first 10 values of season+year +from the original cube: These points are individual months, so adjacent ones are often in the same season: .. doctest:: aggregation @@ -271,7 +271,7 @@ so adjacent ones are often in the same season: djf 2007 djf 2007 -Compare this with the first 10 values of the new cube's coordinates: +Compare this with the first 10 values of the new cube's coordinates: All the points now have distinct season+year values: .. doctest:: aggregation @@ -294,7 +294,7 @@ All the points now have distinct season+year values: Because the original data started in April 2006 we have some incomplete seasons (e.g. there were only two months worth of data for 'mam-2006'). -In this case we can fix this by removing all of the resultant 'times' which +In this case we can fix this by removing all of the resultant 'times' which do not cover a three month period (note: judged here as > 3*28 days): .. doctest:: aggregation @@ -306,7 +306,7 @@ do not cover a three month period (note: judged here as > 3*28 days): >>> full_season_means -The final result now represents the seasonal mean temperature for 17 seasons +The final result now represents the seasonal mean temperature for 17 seasons from jja-2006 to jja-2010: .. doctest:: aggregation diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 40dccf9428..e81c6b206c 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -43,6 +43,9 @@ # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name + +from ..util import guess_coord_axis + _TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""") # Configure the logger. @@ -1339,6 +1342,136 @@ def equal(self, other, lenient=None): return super().equal(other, lenient=lenient) +def metadata_filter( + instances, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, +): + """ + Filter a collection of objects by their metadata to fit the given metadata + criteria. Criteria be one or both of: specific properties / other objects + carrying metadata to be matched. + + Args: + + * instances + An iterable of objects to be filtered. + + Kwargs: + + * item + Either + + (a) a :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name`. Defaults to value of `default` + (which itself defaults to `unknown`) as defined in + :class:`~iris.common.CFVariableMixin`. + + (b) a 'coordinate' instance with metadata equal to that of + the desired coordinates. 
Accepts either a + :class:`~iris.coords.DimCoord`, :class:`~iris.coords.AuxCoord`, + :class:`~iris.aux_factory.AuxCoordFactory`, + :class:`~iris.common.CoordMetadata` or + :class:`~iris.common.DimCoordMetadata` or + :class:`~iris.experimental.ugrid.ConnectivityMetadata`. + * standard_name + The CF standard name of the desired coordinate. If None, does not + check for standard name. + * long_name + An unconstrained description of the coordinate. If None, does not + check for long_name. + * var_name + The netCDF variable name of the desired coordinate. If None, does + not check for var_name. + * attributes + A dictionary of attributes desired on the coordinates. If None, + does not check for attributes. + * axis + The desired coordinate axis, see + :func:`~iris.util.guess_coord_axis`. If None, does not check for + axis. Accepts the values 'X', 'Y', 'Z' and 'T' (case-insensitive). + + Returns: + A list of the objects supplied in the ``instances`` argument, limited + to only those that matched the given criteria. + + """ + name = None + obj = None + + if isinstance(item, str): + name = item + else: + obj = item + + result = instances + + if name is not None: + result = [instance for instance in result if instance.name() == name] + + if standard_name is not None: + result = [ + instance + for instance in result + if instance.standard_name == standard_name + ] + + if long_name is not None: + result = [ + instance for instance in result if instance.long_name == long_name + ] + + if var_name is not None: + result = [ + instance for instance in result if instance.var_name == var_name + ] + + if attributes is not None: + if not isinstance(attributes, Mapping): + msg = ( + "The attributes keyword was expecting a dictionary " + "type, but got a %s instead." % type(attributes) + ) + raise ValueError(msg) + + def attr_filter(instance): + return all( + k in instance.attributes + and hexdigest(instance.attributes[k]) == hexdigest(v) + for k, v in attributes.items() + ) + + result = [instance for instance in result if attr_filter(instance)] + + if axis is not None: + axis = axis.upper() + result = [ + instance + for instance in result + if guess_coord_axis(instance) == axis + ] + + if obj is not None: + if hasattr(obj, "__class__") and issubclass( + obj.__class__, BaseMetadata + ): + target_metadata = obj + else: + target_metadata = obj.metadata + + result = [ + instance + for instance in result + if instance.metadata == target_metadata + ] + + return result + + def metadata_manager_factory(cls, **kwargs): """ A class instance factory function responsible for manufacturing diff --git a/lib/iris/cube.py b/lib/iris/cube.py index a15951900b..e8b6d4a692 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -13,7 +13,6 @@ from collections.abc import ( Iterable, Container, - Mapping, MutableMapping, Iterator, ) @@ -40,11 +39,10 @@ import iris.aux_factory from iris.common import ( CFVariableMixin, - CoordMetadata, CubeMetadata, - DimCoordMetadata, metadata_manager_factory, ) +from iris.common.metadata import metadata_filter import iris.coord_systems import iris.coords import iris.exceptions @@ -1639,14 +1637,6 @@ def coords( See also :meth:`Cube.coord()`. 
""" - name = None - coord = None - - if isinstance(name_or_coord, str): - name = name_or_coord - else: - coord = name_or_coord - coords_and_factories = [] if dim_coords in [True, None]: @@ -1656,62 +1646,15 @@ def coords( coords_and_factories += list(self.aux_coords) coords_and_factories += list(self.aux_factories) - if name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.name() == name - ] - - if standard_name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.standard_name == standard_name - ] - - if long_name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.long_name == long_name - ] - - if var_name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.var_name == var_name - ] - - if axis is not None: - axis = axis.upper() - guess_axis = iris.util.guess_coord_axis - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if guess_axis(coord_) == axis - ] - - if attributes is not None: - if not isinstance(attributes, Mapping): - msg = ( - "The attributes keyword was expecting a dictionary " - "type, but got a %s instead." % type(attributes) - ) - raise ValueError(msg) - - def attr_filter(coord_): - return all( - k in coord_.attributes and coord_.attributes[k] == v - for k, v in attributes.items() - ) - - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if attr_filter(coord_) - ] + coords_and_factories = metadata_filter( + coords_and_factories, + item=name_or_coord, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + ) if coord_system is not None: coords_and_factories = [ @@ -1720,20 +1663,6 @@ def attr_filter(coord_): if coord_.coord_system == coord_system ] - if coord is not None: - if hasattr(coord, "__class__") and coord.__class__ in ( - CoordMetadata, - DimCoordMetadata, - ): - target_metadata = coord - else: - target_metadata = coord.metadata - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.metadata == target_metadata - ] - if contains_dimension is not None: coords_and_factories = [ coord_ diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 1c05d13163..12d24ef70f 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -39,6 +39,12 @@ class AncillaryVariableNotFoundError(KeyError): pass +class ConnectivityNotFoundError(KeyError): + """Raised when a search yields no connectivities.""" + + pass + + class CoordinateMultiDimError(ValueError): """Raised when a routine doesn't support multi-dimensional coordinates.""" diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 002c40952f..45c94dbf16 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -10,7 +10,8 @@ """ -from collections import Mapping, namedtuple +from abc import ABC, abstractmethod +from collections import namedtuple from functools import wraps import dask.array as da @@ -18,28 +19,28 @@ from .. 
import _lazy_data as _lazy from ..common.metadata import ( - _hexdigest, BaseMetadata, - CoordMetadata, - DimCoordMetadata, metadata_manager_factory, SERVICES, SERVICES_COMBINE, SERVICES_EQUAL, SERVICES_DIFFERENCE, + metadata_filter, ) from ..common.lenient import _lenient_service as lenient_service from ..common.mixin import CFVariableMixin from ..config import get_logger from ..coords import _DimensionalMetadata, AuxCoord -from ..exceptions import CoordinateNotFoundError +from ..exceptions import CoordinateNotFoundError, ConnectivityNotFoundError from ..util import guess_coord_axis __all__ = [ "Connectivity", "ConnectivityMetadata", + "Mesh1DConnectivities", "Mesh1DCoords", + "Mesh2DConnectivities", "Mesh2DCoords", "MeshEdgeCoords", "MeshFaceCoords", @@ -70,6 +71,20 @@ MeshEdgeCoords = namedtuple("MeshEdgeCoords", ["edge_x", "edge_y"]) MeshFaceCoords = namedtuple("MeshFaceCoords", ["face_x", "face_y"]) +# Mesh connectivity manager namedtuples. +Mesh1DConnectivities = namedtuple("Mesh1DConnectivities", ["edge_node"]) +Mesh2DConnectivities = namedtuple( + "Mesh2DConnectivities", + [ + "face_node", + "edge_node", + "face_edge", + "face_face", + "edge_face", + "boundary_node", + ], +) + class Connectivity(_DimensionalMetadata): """ @@ -823,7 +838,7 @@ def __init__( attributes=None, edge_coords_and_axes=None, face_coords_and_axes=None, - # connectivities=None, + connectivities=None, node_dimension=None, edge_dimension=None, face_dimension=None, @@ -883,15 +898,18 @@ def normalise(location, axis): if self.topology_dimension == 1: self._coord_manager = _Mesh1DCoordinateManager(**kwargs) + self._connectivity_manager = _Mesh1DConnectivityManager( + *connectivities + ) elif self.topology_dimension == 2: self._coord_manager = _Mesh2DCoordinateManager(**kwargs) + self._connectivity_manager = _Mesh2DConnectivityManager( + *connectivities + ) else: emsg = f"Unsupported 'topology_dimension', got {topology_dimension!r}." raise NotImplementedError(emsg) - # based on the topology_dimension, create the appropriate connectivity manager - # self._connectivity_manager = ... - def __eq__(self, other): # TBD return NotImplemented @@ -918,20 +936,54 @@ def __str__(self): args = [] return f"{self.__class__.__name__}({', '.join(args)})" + def _set_dimension_names(self, node, edge, face, reset=False): + args = (node, edge, face) + currents = ( + self.node_dimension, + self.edge_dimension, + self.face_dimension, + ) + zipped = zip(args, currents) + if reset: + node, edge, face = [ + None if arg else current for arg, current in zipped + ] + else: + node, edge, face = [arg or current for arg, current in zipped] + + self.node_dimension = node + self.edge_dimension = edge + self.face_dimension = face + + if self.topology_dimension == 1: + result = Mesh1DNames(self.node_dimension, self.edge_dimension) + elif self.topology_dimension == 2: + result = Mesh2DNames( + self.node_dimension, self.edge_dimension, self.face_dimension + ) + else: + message = ( + f"Unsupported topology_dimension: {self.topology_dimension} ." 
+ ) + raise NotImplementedError(message) + + return result + @property def all_coords(self): return self._coord_manager.all_members @property def edge_dimension(self): - return self._edge_dimension + return self._metadata_manager.edge_dimension @edge_dimension.setter def edge_dimension(self, name): if not name or not isinstance(name, str): - self._edge_dimension = f"Mesh{self.topology_dimension}d_edge" + edge_dimension = f"Mesh{self.topology_dimension}d_edge" else: - self._edge_dimension = name + edge_dimension = name + self._metadata_manager.edge_dimension = edge_dimension @property def edge_coords(self): @@ -939,14 +991,15 @@ def edge_coords(self): @property def face_dimension(self): - return self._face_dimension + return self._metadata_manager.face_dimension @face_dimension.setter def face_dimension(self, name): if not name or not isinstance(name, str): - self._face_dimension = f"Mesh{self.topology_dimension}d_face" + face_dimension = f"Mesh{self.topology_dimension}d_face" else: - self._face_dimension = name + face_dimension = name + self._metadata_manager.face_dimension = face_dimension @property def face_coords(self): @@ -954,55 +1007,53 @@ def face_coords(self): @property def node_dimension(self): - return self._node_dimension + return self._metadata_manager.node_dimension @node_dimension.setter def node_dimension(self, name): if not name or not isinstance(name, str): - self._node_dimension = f"Mesh{self.topology_dimension}d_node" + node_dimension = f"Mesh{self.topology_dimension}d_node" else: - self._node_dimension = name + node_dimension = name + self._metadata_manager.node_dimension = node_dimension @property def node_coords(self): return self._coord_manager.node_coords - # @property - # def all_connectivities(self): - # # return a namedtuple - # # conns = mesh.all_connectivities - # # conns.edge_node, conns.boundary_node - # pass - # - # @property - # def face_node_connectivity(self): - # # required - # return self._connectivity_manager.face_node - # - # @property - # def edge_node_connectivity(self): - # # optionally required - # return self._connectivity_manager.edge_node - # - # @property - # def face_edge_connectivity(self): - # # optional - # return self._connectivity_manager.face_edge - # - # @property - # def face_face_connectivity(self): - # # optional - # return self._connectivity_manager.face_face - # - # @property - # def edge_face_connectivity(self): - # # optional - # return self._connectivity_manager.edge_face - # - # @property - # def boundary_node_connectivity(self): - # # optional - # return self._connectivity_manager.boundary_node + @property + def all_connectivities(self): + return self._connectivity_manager.all_members + + @property + def face_node_connectivity(self): + # required + return self._connectivity_manager.face_node + + @property + def edge_node_connectivity(self): + # optionally required + return self._connectivity_manager.edge_node + + @property + def face_edge_connectivity(self): + # optional + return self._connectivity_manager.face_edge + + @property + def face_face_connectivity(self): + # optional + return self._connectivity_manager.face_face + + @property + def edge_face_connectivity(self): + # optional + return self._connectivity_manager.edge_face + + @property + def boundary_node_connectivity(self): + # optional + return self._connectivity_manager.boundary_node def add_coords( self, @@ -1022,25 +1073,56 @@ def add_coords( face_y=face_y, ) - # def add_connectivities(self, *args): - # # this supports adding a new connectivity to the manager, 
but also replacing an existing connectivity - # self._connectivity_manager.add(*args) - - # def connectivities( - # self, - # name_or_coord=None, - # standard_name=None, - # long_name=None, - # var_name=None, - # attributes=None, - # node=False, - # edge=False, - # face=False, - # ): - # pass - - # def connectivity(self, ...): - # pass + def add_connectivities(self, *connectivities): + self._connectivity_manager.add(*connectivities) + + def connectivities( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + node=None, + edge=None, + face=None, + ): + return self._connectivity_manager.filters( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + node=node, + edge=edge, + face=face, + ) + + def connectivity( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + node=None, + edge=None, + face=None, + ): + return self._connectivity_manager.filter( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + node=node, + edge=edge, + face=face, + ) def coord( self, @@ -1090,9 +1172,29 @@ def coords( face=face, ) - # def remove_connectivities(self, ...): - # # needs to respect the minimum UGRID contract - # self._connectivity_manager.remove(...) + def remove_connectivities( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + node=None, + edge=None, + face=None, + ): + return self._connectivity_manager.remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + node=node, + edge=edge, + face=face, + ) def remove_coords( self, @@ -1106,7 +1208,7 @@ def remove_coords( edge=None, face=None, ): - self._coord_manager.remove( + return self._coord_manager.remove( item=item, standard_name=standard_name, long_name=long_name, @@ -1138,42 +1240,18 @@ def xml_element(self): # def to_MeshCoord(self, location, axis): # # factory method # # return MeshCoord(..., location=location, axis=axis) - # # use Connectivity.indices_by_src() for fetching indices. + # # use Connectivity.indices_by_src() for fetching indices, passing in the lazy_indices() result as an argument. # # def to_MeshCoords(self, location): # # factory method # # return MeshCoord(..., location=location, axis="x"), MeshCoord(..., location=location, axis="y") - # # use Connectivity.indices_by_src() for fetching indices. + # # use Connectivity.indices_by_src for fetching indices, passing in the lazy_indices() result as an argument. 
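The connectivity accessors added above deliberately mirror the coordinate API. A rough, hypothetical usage sketch follows; the Connectivity argument order (indices first, then cf_role) is assumed from its use elsewhere in this series, and the Mesh construction itself is elided:

    import numpy as np
    from iris.experimental.ugrid import Connectivity

    # A single triangular face described by its face-node and edge-node mappings.
    face_node = Connectivity(
        np.array([[0, 1, 2]]), cf_role="face_node_connectivity"
    )
    edge_node = Connectivity(
        np.array([[0, 1], [1, 2], [2, 0]]), cf_role="edge_node_connectivity"
    )

    # These would be handed to Mesh(..., connectivities=[face_node, edge_node]),
    # after which lookups follow the Cube.coord()/coords() pattern, e.g.
    #   mesh.connectivity(cf_role="face_node_connectivity")
    #   mesh.connectivities(edge=True)
    # with ConnectivityNotFoundError raised when connectivity() does not match
    # exactly one member.
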
def dimension_names_reset(self, node=False, edge=False, face=False): - if node: - self.node_dimension = None - if edge: - self.edge_dimension = None - if face: - self.face_dimension = None - if self.topology_dimension == 1: - result = Mesh1DNames(self.node_dimension, self.edge_dimension) - else: - result = Mesh2DNames( - self.node_dimension, self.edge_dimension, self.face_dimension - ) - return result + return self._set_dimension_names(node, edge, face, reset=True) def dimension_names(self, node=None, edge=None, face=None): - if node: - self.node_dimension = node - if edge: - self.edge_dimension = edge - if face: - self.face_dimension = face - if self.topology_dimension == 1: - result = Mesh1DNames(self.node_dimension, self.edge_dimension) - else: - result = Mesh2DNames( - self.node_dimension, self.edge_dimension, self.node_dimension - ) - return result + return self._set_dimension_names(node, edge, face, reset=False) @property def cf_role(self): @@ -1248,88 +1326,6 @@ def __str__(self): ] return f"{self.__class__.__name__}({', '.join(args)})" - @staticmethod - def _filters( - members, - item=None, - standard_name=None, - long_name=None, - var_name=None, - attributes=None, - axis=None, - ): - """ - TDB: support coord_systems? - - """ - name = None - coord = None - - if isinstance(item, str): - name = item - else: - coord = item - - if name is not None: - members = {k: v for k, v in members.items() if v.name() == name} - - if standard_name is not None: - members = { - k: v - for k, v in members.items() - if v.standard_name == standard_name - } - - if long_name is not None: - members = { - k: v for k, v in members.items() if v.long_name == long_name - } - - if var_name is not None: - members = { - k: v for k, v in members.items() if v.var_name == var_name - } - - if axis is not None: - axis = axis.upper() - members = { - k: v for k, v in members.items() if guess_coord_axis(v) == axis - } - - if attributes is not None: - if not isinstance(attributes, Mapping): - emsg = ( - "The attributes keyword was expecting a dictionary " - f"type, but got a {type(attributes)} instead." - ) - raise ValueError(emsg) - - def _filter(coord): - return all( - k in coord.attributes - and _hexdigest(coord.attributes[k]) == _hexdigest(v) - for k, v in attributes.items() - ) - - members = {k: v for k, v in members.items() if _filter(v)} - - if coord is not None: - if hasattr(coord, "__class__") and coord.__class__ in ( - CoordMetadata, - DimCoordMetadata, - ): - target_metadata = coord - else: - target_metadata = coord.metadata - - members = { - k: v - for k, v in members.items() - if v.metadata == target_metadata - } - - return members - def _remove(self, **kwargs): result = {} members = self.filters(**kwargs) @@ -1481,6 +1477,7 @@ def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): self._add(MeshEdgeCoords(edge_x, edge_y)) def filter(self, **kwargs): + # TODO: rationalise commonality with MeshConnectivityManager.filter and Cube.coord. result = self.filters(**kwargs) if len(result) > 1: @@ -1525,6 +1522,8 @@ def filters( edge=None, face=None, ): + # TBD: support coord_systems? 
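    # A small standalone sketch (illustrative helper name, not part of this
    # patch) of the node/edge/face "tri-state" resolution used by the
    # filters() methods: when nothing is requested every location is searched;
    # requesting any location (True) makes the unspecified ones default to
    # False; passing False simply excludes that one location.
    def _normalise_locations(node=None, edge=None, face=None):
        args = [node, edge, face]
        state = not any(arg for arg in args if arg is not None)
        return tuple(arg if arg is not None else state for arg in args)

    assert _normalise_locations() == (True, True, True)
    assert _normalise_locations(edge=True) == (False, True, False)
    assert _normalise_locations(face=False) == (True, True, False)
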
+ # rationalise the tri-state behaviour args = [node, edge, face] state = not any(set(filter(lambda arg: arg is not None, args))) @@ -1532,28 +1531,22 @@ def filters( lambda arg: arg if arg is not None else state, args ) - def func(args): - return args[1] is not None + def populated_coords(coords_tuple): + return list(filter(None, list(coords_tuple))) - members = {} + members = [] if node: - members.update( - dict(filter(func, self.node_coords._asdict().items())) - ) + members += populated_coords(self.node_coords) if edge: - members.update( - dict(filter(func, self.edge_coords._asdict().items())) - ) + members += populated_coords(self.edge_coords) if hasattr(self, "face_coords"): if face: - members.update( - dict(filter(func, self.face_coords._asdict().items())) - ) + members += populated_coords(self.face_coords) else: dmsg = "Ignoring request to filter non-existent 'face_coords'" logger.debug(dmsg, extra=dict(cls=self.__class__.__name__)) - result = self._filters( + result = metadata_filter( members, item=item, standard_name=standard_name, @@ -1563,7 +1556,12 @@ def func(args): axis=axis, ) - return result + # Use the results to filter the _members dict for returning. + result_ids = [id(r) for r in result] + result_dict = { + k: v for k, v in self._members.items() if id(v) in result_ids + } + return result_dict def remove( self, @@ -1680,102 +1678,308 @@ def remove( ) -# # keep an eye on the __init__ inheritance -# class _Mesh1DConnectivityManager: -# REQUIRED = ( -# "edge_node", -# ) -# OPTIONAL = () -# def __init__(self, edge_node): -# # required -# self.edge_node = edge_node -# -# # WOO-GA - this can easily get out of sync with the self attributes. -# # choose the container wisely e.g., could be an dict..., also the self -# # attributes may need to be @property's that access the chosen _members container -# -# # is this a list? as dict? a namedtuple? use case is self.add() -# self._members = [] -# -# if self.edge_node is not None: -# self._members.append(self.edge_node) -# -# def __iter__(self): -# for member in self._members: -# yield member -# -# def __getstate__(self): -# pass -# -# def __setstate__(self, state): -# pass -# -# def connectivity(self, **kwargs): -# # see Cube.coord for pattern, checking for a single result -# return self.connectivities(**kwargs)[0] -# -# def connectivities(self, ...): -# # see Cube.coords for relevant patterns -# # return [ ... ] -# pass -# -# def add(self, *args): -# # loop thru args and add (clobber) -# # adopt same philosophy as remove for adding connectivites with unsupported cf-role -# pass -# -# def remove(self, ...): -# # needs to respect the minimum UGRID contract -# # use logging/warning to flag items not removed - highlight in doc-string -# # don't raise an exception -# -# def __str__(self): -# pass -# -# def __repr__(self): -# pass -# -# def __eq__(self, other): -# # Full equality could be MASSIVE, so we want to avoid that. -# # Ideally we want a mesh signature from LFRic for comparison, although this would -# # limit Iris' relevance outside MO. -# # TL;DR: unknown quantity. 
-# raise NotImplemented -# -# def __ne__(self, other): -# # See __eq__ -# raise NotImplemented -# -# -# class _Mesh2DConnectivityManager(_Mesh1DConnectivityManager): -# REQUIRED = ( -# "face_node", -# ) -# OPTIONAL = ( -# "edge_node", -# "face_edge", -# "face_face", -# "edge_face", -# "boundary_node", -# ) -# def __init__(self, face_node, edge_node=None, face_edge=None, face_face=None, edge_face=None, boundary_node=None): -# # required -# self.face_node = face_node -# self._members = [self.face_node] -# -# # optionally required -# self.edge_node = edge_node -# # optional -# self.face_edge = face_edge -# self.face_face = face_face -# self.edge_face = edge_face -# self.boundary_node = boundary_node -# -# # edge_node could be None here. are we okay with this pattern? -# super().__init__(edge_node) -# -# # does order matter? -# self._members.extend([member for member in self.OPTIONAL if member is not None and member != "edge_node"]) +class _MeshConnectivityManagerBase(ABC): + # Override these in subclasses. + REQUIRED: tuple = NotImplemented + OPTIONAL: tuple = NotImplemented + + def __init__(self, *connectivities): + cf_roles = [c.cf_role for c in connectivities] + for requisite in self.REQUIRED: + if requisite not in cf_roles: + message = ( + f"{self.__name__} requires a {requisite} Connectivity." + ) + raise ValueError(message) + + self.ALL = self.REQUIRED + self.OPTIONAL + self._members = {member: None for member in self.ALL} + self.add(*connectivities) + + def __eq__(self, other): + # TBD + return NotImplemented + + def __getstate__(self): + # TBD + pass + + def __iter__(self): + for item in self._members.items(): + yield item + + def __ne__(self, other): + # TBD + return NotImplemented + + def __repr__(self): + args = [ + f"{member}={connectivity!r}" + for member, connectivity in self + if connectivity is not None + ] + return f"{self.__class__.__name__}({', '.join(args)})" + + def __setstate__(self, state): + # TBD + pass + + def __str__(self): + args = [ + f"{member}=True" + for member, connectivity in self + if connectivity is not None + ] + return f"{self.__class__.__name__}({', '.join(args)})" + + @property + @abstractmethod + def all_members(self): + return NotImplemented + + def add(self, *connectivities): + # Since Connectivity classes include their cf_role, no setters will be + # provided, just a means to add one or more connectivities to the + # manager. + # No warning is raised for duplicate cf_roles - user is trusted to + # validate their outputs. + add_dict = {} + for connectivity in connectivities: + if not isinstance(connectivity, Connectivity): + message = f"Expected Connectivity, got: {type(connectivity)} ." + raise ValueError(message) + cf_role = connectivity.cf_role + if cf_role not in self.ALL: + message = ( + f"Not adding connectivity ({cf_role}: " + f"{connectivity!r}) - cf_role must be one of: {self.ALL} ." + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + else: + add_dict[cf_role] = connectivity + + # Validate shapes. + proposed_members = {**self._members, **add_dict} + locations = set( + [ + c.src_location + for c in proposed_members.values() + if c is not None + ] + ) + for location in locations: + counts = [ + len(c.indices_by_src(c.lazy_indices())) + for c in proposed_members.values() + if c is not None and c.src_location == location + ] + # Check is list values are identical. + if not counts.count(counts[0]) == len(counts): + message = ( + f"Invalid Connectivities provided - inconsistent " + f"{location} counts." 
+ ) + raise ValueError(message) + + self._members = proposed_members + + def filter(self, **kwargs): + # TODO: rationalise commonality with MeshCoordManager.filter and Cube.coord. + result = self.filters(**kwargs) + if len(result) > 1: + names = ", ".join( + f"{member}={connectivity!r}" + for member, connectivity in result.items() + ) + message = ( + f"Expected to find exactly 1 connectivity, but found " + f"{len(result)}. They were: {names}." + ) + raise ConnectivityNotFoundError(message) + elif len(result) == 0: + item = kwargs["item"] + _name = item + if item is not None: + if not isinstance(item, str): + _name = item.name() + bad_name = ( + _name or kwargs["standard_name"] or kwargs["long_name"] or "" + ) + message = ( + f"Expected to find exactly 1 {bad_name} connectivity, " + f"but found none." + ) + raise ConnectivityNotFoundError(message) + + return result + + def filters( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + node=None, + edge=None, + face=None, + ): + members = [c for c in self._members.values() if c is not None] + + if cf_role is not None: + members = [ + instance for instance in members if instance.cf_role == cf_role + ] + + def location_filter(instances, loc_arg, loc_name): + if loc_arg is False: + filtered = [ + instance + for instance in instances + if loc_name + not in (instance.src_location, instance.tgt_location) + ] + elif loc_arg is None: + filtered = instances + else: + # Interpret any other value as =True. + filtered = [ + instance + for instance in instances + if loc_name + in (instance.src_location, instance.tgt_location) + ] + + return filtered + + for arg, loc in ( + (node, "node"), + (edge, "edge"), + (face, "face"), + ): + members = location_filter(members, arg, loc) + + # No need to actually modify filtering behaviour - already won't return + # any face cf-roles if none are present. + supports_faces = any(["face" in role for role in self.ALL]) + if face and not supports_faces: + message = ( + "Ignoring request to filter for non-existent 'face' cf-roles." + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + + result = metadata_filter( + members, + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + ) + + # Use the results to filter the _members dict for returning. 
+ result_ids = [id(r) for r in result] + result_dict = { + k: v for k, v in self._members.items() if id(v) in result_ids + } + return result_dict + + def remove( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + node=None, + edge=None, + face=None, + ): + removal_dict = self.filters( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + node=node, + edge=edge, + face=face, + ) + for cf_role in self.REQUIRED: + excluded = removal_dict.pop(cf_role, None) + if excluded: + message = ( + f"Ignoring request to remove required connectivity " + f"({cf_role}: {excluded!r})" + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + + for cf_role in removal_dict.keys(): + self._members[cf_role] = None + + return removal_dict + + +class _Mesh1DConnectivityManager(_MeshConnectivityManagerBase): + REQUIRED = ("edge_node_connectivity",) + OPTIONAL = () + + @property + def all_members(self): + return Mesh1DConnectivities(edge_node=self.edge_node) + + @property + def edge_node(self): + return self._members["edge_node_connectivity"] + + +class _Mesh2DConnectivityManager(_MeshConnectivityManagerBase): + REQUIRED = ("face_node_connectivity",) + OPTIONAL = ( + "edge_node_connectivity", + "face_edge_connectivity", + "face_face_connectivity", + "edge_face_connectivity", + "boundary_node_connectivity", + ) + + @property + def all_members(self): + return Mesh2DConnectivities( + face_node=self.face_node, + edge_node=self.edge_node, + face_edge=self.face_edge, + face_face=self.face_face, + edge_face=self.edge_face, + boundary_node=self.boundary_node, + ) + + @property + def boundary_node(self): + return self._members["boundary_node_connectivity"] + + @property + def edge_face(self): + return self._members["edge_face_connectivity"] + + @property + def edge_node(self): + return self._members["edge_node_connectivity"] + + @property + def face_edge(self): + return self._members["face_edge_connectivity"] + + @property + def face_face(self): + return self._members["face_face_connectivity"] + + @property + def face_node(self): + return self._members["face_node_connectivity"] #: Convenience collection of lenient metadata combine services. diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py new file mode 100644 index 0000000000..dafb50554b --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -0,0 +1,118 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :func:`iris.common.metadata_filter`. + +""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +import numpy as np + +from iris.common.metadata import ( + CoordMetadata, + DimCoordMetadata, + metadata_filter, +) +from iris.coords import AuxCoord + +Mock = tests.mock.Mock + + +class Test_standard(tests.IrisTest): + def test_name(self): + name_one = Mock() + name_one.name.return_value = "one" + name_two = Mock() + name_two.name.return_value = "two" + input_list = [name_one, name_two] + result = metadata_filter(input_list, item="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_item(self): + coord = Mock(__class__=AuxCoord) + mock = Mock() + input_list = [coord, mock] + result = metadata_filter(input_list, item=coord) + self.assertIn(coord, result) + self.assertNotIn(mock, result) + + def test_item_metadata(self): + coord = Mock(metadata=CoordMetadata) + dim_coord = Mock(metadata=DimCoordMetadata) + input_list = [coord, dim_coord] + result = metadata_filter(input_list, item=coord) + self.assertIn(coord, result) + self.assertNotIn(dim_coord, result) + + def test_standard_name(self): + name_one = Mock(standard_name="one") + name_two = Mock(standard_name="two") + input_list = [name_one, name_two] + result = metadata_filter(input_list, standard_name="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_long_name(self): + name_one = Mock(long_name="one") + name_two = Mock(long_name="two") + input_list = [name_one, name_two] + result = metadata_filter(input_list, long_name="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_var_name(self): + name_one = Mock(var_name="one") + name_two = Mock(var_name="two") + input_list = [name_one, name_two] + result = metadata_filter(input_list, var_name="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_attributes(self): + # Confirm that this can handle attrib dicts including np arrays. 
+ attrib_one_two = Mock( + attributes={"one": np.arange(1), "two": np.arange(2)} + ) + attrib_three_four = Mock( + attributes={"three": np.arange(3), "four": np.arange(4)} + ) + input_list = [attrib_one_two, attrib_three_four] + result = metadata_filter( + input_list, attributes=attrib_one_two.attributes + ) + self.assertIn(attrib_one_two, result) + self.assertNotIn(attrib_three_four, result) + + def test_invalid_attributes(self): + attrib_one = Mock(attributes={"one": 1}) + input_list = [attrib_one] + self.assertRaisesRegex( + ValueError, + ".*expecting a dictionary.*", + metadata_filter, + input_list, + attributes="one", + ) + + def test_axis(self): + axis_lon = Mock(standard_name="longitude") + axis_lat = Mock(standard_name="latitude") + input_list = [axis_lon, axis_lat] + result = metadata_filter(input_list, axis="x") + self.assertIn(axis_lon, result) + self.assertNotIn(axis_lat, result) + + def test_multiple_args(self): + coord_one = Mock(__class__=AuxCoord, long_name="one") + coord_two = Mock(__class__=AuxCoord, long_name="two") + input_list = [coord_one, coord_two] + result = metadata_filter(input_list, item=coord_one, long_name="one") + self.assertIn(coord_one, result) + self.assertNotIn(coord_two, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py index cfc668fb88..105365c908 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py @@ -398,10 +398,10 @@ def test_op_lenient_same_members_none(self): with mock.patch( "iris.common.metadata._LENIENT", return_value=True ): - self.assertTrue( + self.assertEqual( expected, lmetadata.combine(rmetadata)._asdict() ) - self.assertTrue( + self.assertEqual( expected, rmetadata.combine(lmetadata)._asdict() ) From f4a810879da93a72ca0b00339157490951c40238 Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 23 Feb 2021 09:17:34 +0000 Subject: [PATCH 09/22] minor fixes (#4025) * minor fixes * wip --- lib/iris/common/metadata.py | 29 +++++++++++++------ lib/iris/experimental/ugrid.py | 14 ++++++--- .../common/metadata/test_metadata_filter.py | 20 ++++++++++++- 3 files changed, 49 insertions(+), 14 deletions(-) diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index e81c6b206c..801ba57c44 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -27,10 +27,6 @@ __all__ = [ - "SERVICES_COMBINE", - "SERVICES_DIFFERENCE", - "SERVICES_EQUAL", - "SERVICES", "AncillaryVariableMetadata", "BaseMetadata", "CellMeasureMetadata", @@ -38,7 +34,12 @@ "CubeMetadata", "DimCoordMetadata", "hexdigest", + "metadata_filter", "metadata_manager_factory", + "SERVICES", + "SERVICES_COMBINE", + "SERVICES_DIFFERENCE", + "SERVICES_EQUAL", ] @@ -1353,13 +1354,13 @@ def metadata_filter( ): """ Filter a collection of objects by their metadata to fit the given metadata - criteria. Criteria be one or both of: specific properties / other objects + criteria. Criteria can be one or both of: specific properties / other objects carrying metadata to be matched. Args: * instances - An iterable of objects to be filtered. + One or more objects to be filtered. 
Kwargs: @@ -1408,6 +1409,10 @@ def metadata_filter( else: obj = item + # apply de morgan's law for one less logical operation + if not (isinstance(instances, str) or isinstance(instances, Iterable)): + instances = [instances] + result = instances if name is not None: @@ -1449,10 +1454,16 @@ def attr_filter(instance): if axis is not None: axis = axis.upper() + + def get_axis(instance): + if hasattr(instance, "axis"): + axis = instance.axis.upper() + else: + axis = guess_coord_axis(instance) + return axis + result = [ - instance - for instance in result - if guess_coord_axis(instance) == axis + instance for instance in result if get_axis(instance) == axis ] if obj is not None: diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 45c94dbf16..f9f6b8a10f 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -11,7 +11,7 @@ """ from abc import ABC, abstractmethod -from collections import namedtuple +from collections import Iterable, namedtuple from functools import wraps import dask.array as da @@ -20,18 +20,18 @@ from .. import _lazy_data as _lazy from ..common.metadata import ( BaseMetadata, + metadata_filter, metadata_manager_factory, SERVICES, SERVICES_COMBINE, SERVICES_EQUAL, SERVICES_DIFFERENCE, - metadata_filter, ) from ..common.lenient import _lenient_service as lenient_service from ..common.mixin import CFVariableMixin from ..config import get_logger from ..coords import _DimensionalMetadata, AuxCoord -from ..exceptions import CoordinateNotFoundError, ConnectivityNotFoundError +from ..exceptions import ConnectivityNotFoundError, CoordinateNotFoundError from ..util import guess_coord_axis @@ -831,6 +831,7 @@ def __init__( self, topology_dimension, node_coords_and_axes, + connectivities, standard_name=None, long_name=None, var_name=None, @@ -838,7 +839,6 @@ def __init__( attributes=None, edge_coords_and_axes=None, face_coords_and_axes=None, - connectivities=None, node_dimension=None, edge_dimension=None, face_dimension=None, @@ -874,6 +874,12 @@ def normalise(location, axis): raise ValueError(emsg) return f"{location}_{axis}" + if not isinstance(node_coords_and_axes, Iterable): + node_coords_and_axes = [node_coords_and_axes] + + if not isinstance(connectivities, Iterable): + connectivities = [connectivities] + kwargs = {} for coord, axis in node_coords_and_axes: kwargs[normalise("node", axis)] = coord diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index dafb50554b..b5dad2864c 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -25,6 +25,13 @@ class Test_standard(tests.IrisTest): + def test_instances_non_iterable(self): + item = Mock() + item.name.return_value = "one" + result = metadata_filter(item, item="one") + self.assertEqual(1, len(result)) + self.assertIn(item, result) + def test_name(self): name_one = Mock() name_one.name.return_value = "one" @@ -101,14 +108,25 @@ def test_invalid_attributes(self): attributes="one", ) - def test_axis(self): + def test_axis__by_guess(self): + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes axis_lon = Mock(standard_name="longitude") + del axis_lon.axis axis_lat = Mock(standard_name="latitude") + del axis_lat.axis input_list = [axis_lon, axis_lat] result = metadata_filter(input_list, axis="x") self.assertIn(axis_lon, result) self.assertNotIn(axis_lat, result) + def 
test_axis__by_member(self): + axis_x = Mock(axis="x") + axis_y = Mock(axis="y") + input_list = [axis_x, axis_y] + result = metadata_filter(input_list, axis="x") + self.assertEqual(1, len(result)) + self.assertIn(axis_x, result) + def test_multiple_args(self): coord_one = Mock(__class__=AuxCoord, long_name="one") coord_two = Mock(__class__=AuxCoord, long_name="two") From ee19869ed1851d6801288c33f8afc48090f9948c Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 23 Feb 2021 10:43:19 +0000 Subject: [PATCH 10/22] add mesh pickle support (#4026) --- lib/iris/experimental/ugrid.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index f9f6b8a10f..f14d2f0a35 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -921,8 +921,11 @@ def __eq__(self, other): return NotImplemented def __getstate__(self): - # TBD - pass + return ( + self._metadata_manager, + self._coord_manager, + self._connectivity_manager, + ) def __ne__(self, other): # TBD @@ -934,8 +937,10 @@ def __repr__(self): return f"{self.__class__.__name__}({', '.join(args)})" def __setstate__(self, state): - # TBD - pass + metadata_manager, coord_manager, connectivity_manager = state + self._metadata_manager = metadata_manager + self._coord_manager = coord_manager + self._connectivity_manager = connectivity_manager def __str__(self): # TBD @@ -1303,8 +1308,7 @@ def __eq__(self, other): return NotImplemented def __getstate__(self): - # TBD - pass + return self._members def __iter__(self): for item in self._members.items(): @@ -1323,8 +1327,7 @@ def __repr__(self): return f"{self.__class__.__name__}({', '.join(args)})" def __setstate__(self, state): - # TBD - pass + self._members = state def __str__(self): args = [ @@ -1707,8 +1710,7 @@ def __eq__(self, other): return NotImplemented def __getstate__(self): - # TBD - pass + return self._members def __iter__(self): for item in self._members.items(): @@ -1727,8 +1729,7 @@ def __repr__(self): return f"{self.__class__.__name__}({', '.join(args)})" def __setstate__(self, state): - # TBD - pass + self._members = state def __str__(self): args = [ From ef4c411f96f29c981bc97bc963b649aae4dd9557 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Tue, 23 Feb 2021 18:15:33 +0000 Subject: [PATCH 11/22] Test Mesh WIP. --- .../unit/experimental/ugrid/test_Mesh.py | 201 ++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100644 lib/iris/tests/unit/experimental/ugrid/test_Mesh.py diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py new file mode 100644 index 0000000000..bbbbf12e96 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -0,0 +1,201 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :class:`iris.experimental.ugrid.Mesh` class.""" + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from iris.coords import AuxCoord +from iris.experimental import ugrid + +# A collection of minimal coords and connectivities describing an equilateral triangle. 
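+# The three nodes sit at (lon, lat) = (0, 0), (2, 0) and (1, 1); the edges join
+# them pairwise and the single face spans all three nodes.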
+NODE_LON = AuxCoord([0, 2, 1], standard_name="longitude", var_name="node_lon") +NODE_LAT = AuxCoord([0, 0, 1], standard_name="latitude", var_name="node_lat") +EDGE_LON = AuxCoord( + [1, 1.5, 0.5], standard_name="longitude", var_name="edge_lon" +) +EDGE_LAT = AuxCoord( + [0, 0.5, 0.5], standard_name="latitude", var_name="edge_lat" +) +FACE_LON = AuxCoord([0.5], standard_name="longitude", var_name="face_lon") +FACE_LAT = AuxCoord([0.5], standard_name="latitude", var_name="face_lat") + +EDGE_NODE = ugrid.Connectivity( + [[0, 1], [1, 2], [2, 0]], cf_role="edge_node_connectivity" +) +FACE_NODE = ugrid.Connectivity([[0, 1, 2]], cf_role="face_node_connectivity") +FACE_EDGE = ugrid.Connectivity([[0, 1, 2]], cf_role="face_edge_connectivity") +# Actually meaningless: +FACE_FACE = ugrid.Connectivity([[0, 0, 0]], cf_role="face_face_connectivity") +# Actually meaningless: +EDGE_FACE = ugrid.Connectivity( + [[0, 0], [0, 0], [0, 0]], cf_role="edge_face_connectivity" +) +BOUNDARY_NODE = ugrid.Connectivity( + [[0, 1], [1, 2], [2, 0]], cf_role="boundary_node_connectivity" +) + + +class Test1DTopology(tests.IrisTest): + KWARGS = { + "topology_dimension": 1, + "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), + "connectivities": EDGE_NODE, + "long_name": "my_topology_mesh", + "var_name": "mesh", + "attributes": {"notes": "this is a test"}, + "node_dimension": "NodeDim", + "edge_dimension": "EdgeDim", + "edge_coords_and_axes": ((EDGE_LON, "x"), (EDGE_LAT, "y")), + } + + @classmethod + def setUpClass(cls): + cls.mesh = ugrid.Mesh(**cls.KWARGS) + + def test_all_connectivities(self): + expected = ugrid.Mesh1DConnectivities(EDGE_NODE) + self.assertEqual(expected, self.mesh.all_connectivities) + + def test_all_coords(self): + expected = ugrid.Mesh1DCoords(NODE_LON, NODE_LAT, EDGE_LON, EDGE_LAT) + self.assertEqual(expected, self.mesh.all_coords) + + def test_boundary_node(self): + with self.assertRaises(AttributeError): + _ = self.mesh.boundary_node_connectivity + + def test_edge_dimension(self): + self.assertEqual( + self.KWARGS["edge_dimension"], self.mesh.edge_dimension + ) + + def test_edge_dimension_set(self): + # Don't modify self.mesh, which would prevent re-use. + new_mesh = ugrid.Mesh(**self.KWARGS) + new_mesh.edge_dimension = "foo" + self.assertEqual("foo", new_mesh.edge_dimension) + + def test_edge_coords(self): + expected = ugrid.MeshEdgeCoords(EDGE_LON, EDGE_LAT) + self.assertEqual(expected, self.mesh.edge_coords) + + def test_edge_face(self): + with self.assertRaises(AttributeError): + _ = self.mesh.edge_face_connectivity + + def test_edge_node(self): + self.assertEqual(EDGE_NODE, self.mesh.edge_node_connectivity) + + def test_face_coords(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_coords + + def test_face_dimension(self): + self.assertIsNone(self.mesh.face_dimension) + + def test_face_dimension_set(self): + # Don't modify self.mesh, which would prevent re-use. 
+ new_mesh = ugrid.Mesh(**self.KWARGS) + with self.assertRaises(ValueError): + new_mesh.face_dimension = "foo" + + def test_face_edge(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_edge_connectivity + + def test_face_face(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_face_connectivity + + def test_face_node(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_node_connectivity + + def test_node_coords(self): + expected = ugrid.MeshNodeCoords(NODE_LON, NODE_LAT) + self.assertEqual(expected, self.mesh.node_coords) + + def test_node_dimension(self): + self.assertEqual( + self.KWARGS["node_dimension"], self.mesh.node_dimension + ) + + def test_node_dimension_set(self): + # Don't modify self.mesh, which would prevent re-use. + new_mesh = ugrid.Mesh(**self.KWARGS) + new_mesh.node_dimension = "foo" + self.assertEqual("foo", new_mesh.node_dimension) + + def test_topology_dimension(self): + self.assertEqual( + self.KWARGS["topology_dimension"], self.mesh.topology_dimension + ) + + +class Test2DTopology(Test1DTopology): + @classmethod + def setUpClass(cls): + cls.KWARGS["topology_dimension"] = 2 + cls.KWARGS["connectivities"] = ( + FACE_NODE, + EDGE_NODE, + FACE_EDGE, + FACE_FACE, + EDGE_FACE, + BOUNDARY_NODE, + ) + cls.KWARGS["face_dimension"] = "FaceDim" + cls.KWARGS["face_coords_and_axes"] = ((FACE_LON, "x"), (FACE_LAT, "y")) + super().setUpClass() + + def test_all_connectivities(self): + expected = ugrid.Mesh2DConnectivities( + FACE_NODE, + EDGE_NODE, + FACE_EDGE, + FACE_FACE, + EDGE_FACE, + BOUNDARY_NODE, + ) + self.assertEqual(expected, self.mesh.all_connectivities) + + def test_all_coords(self): + expected = ugrid.Mesh2DCoords( + NODE_LON, NODE_LAT, EDGE_LON, EDGE_LAT, FACE_LON, FACE_LAT + ) + self.assertEqual(expected, self.mesh.all_coords) + + def test_boundary_node(self): + self.assertEqual(BOUNDARY_NODE, self.mesh.boundary_node_connectivity) + + def test_edge_face(self): + self.assertEqual(EDGE_FACE, self.mesh.edge_face_connectivity) + + def test_face_coords(self): + expected = ugrid.MeshFaceCoords(FACE_LON, FACE_LAT) + self.assertEqual(expected, self.mesh.face_coords) + + def test_face_dimension(self): + self.assertEqual( + self.KWARGS["face_dimension"], self.mesh.face_dimension + ) + + def test_face_dimension_set(self): + # Don't modify self.mesh, which would prevent re-use. + new_mesh = ugrid.Mesh(**self.KWARGS) + new_mesh.face_dimension = "foo" + self.assertEqual("foo", new_mesh.face_dimension) + + def test_face_edge(self): + self.assertEqual(FACE_EDGE, self.mesh.face_edge_connectivity) + + def test_face_face(self): + self.assertEqual(FACE_FACE, self.mesh.face_face_connectivity) + + def test_face_node(self): + self.assertEqual(FACE_NODE, self.mesh.face_node_connectivity) From 597bb1a193626332a9d3496f6322c34ab580be12 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Wed, 24 Feb 2021 11:53:03 +0000 Subject: [PATCH 12/22] Mesh face_dimension not set for topology_dimension=1. --- lib/iris/experimental/ugrid.py | 23 ++++++++++++++++++- .../unit/experimental/ugrid/test_Mesh.py | 4 +++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index f14d2f0a35..1e2d0563a3 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -843,6 +843,18 @@ def __init__( edge_dimension=None, face_dimension=None, ): + """ + .. 
note:: + + :attr:`node_dimension`, :attr:`edge_dimension` and + :attr:`face_dimension` are stored to help round-tripping of UGRID + files. As such their presence in :class:`Mesh` is not a direct + mirror of that written in the UGRID specification, where + :attr:`node_dimension` is not mentioned, while + :attr:`edge_dimension` is only present for + :attr:`topology_dimension` ``>=2``. + + """ # TODO: support volumes. # TODO: support (coord, "z") @@ -1006,7 +1018,16 @@ def face_dimension(self): @face_dimension.setter def face_dimension(self, name): - if not name or not isinstance(name, str): + if self.topology_dimension < 2: + face_dimension = None + if name: + # Tell the user it is not being set if they expected otherwise. + message = ( + "Not setting face_dimension (inappropriate for " + f"topology_dimension={self.topology_dimension} ." + ) + logger.debug(message) + elif not name or not isinstance(name, str): face_dimension = f"Mesh{self.topology_dimension}d_face" else: face_dimension = name diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index bbbbf12e96..b0bcd07997 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -101,8 +101,10 @@ def test_face_dimension(self): def test_face_dimension_set(self): # Don't modify self.mesh, which would prevent re-use. new_mesh = ugrid.Mesh(**self.KWARGS) - with self.assertRaises(ValueError): + with self.assertLogs(ugrid.logger, level="DEBUG") as log: new_mesh.face_dimension = "foo" + self.assertIn("Not setting face_dimension", log.output[0]) + self.assertIsNone(new_mesh.face_dimension) def test_face_edge(self): with self.assertRaises(AttributeError): From 02f991b13f0a210a2ef69fbe3a3e447c11b6da4d Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Wed, 24 Feb 2021 12:52:23 +0000 Subject: [PATCH 13/22] Mesh testing WIP. --- lib/iris/experimental/ugrid.py | 21 +-- .../unit/experimental/ugrid/test_Mesh.py | 129 ++++++++++++++---- 2 files changed, 114 insertions(+), 36 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 1e2d0563a3..0a2f108145 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -1096,14 +1096,19 @@ def add_coords( face_x=None, face_y=None, ): - self._coord_manager.add( - node_x=node_x, - node_y=node_y, - edge_x=edge_x, - edge_y=edge_y, - face_x=face_x, - face_y=face_y, - ) + # Filter out absent arguments - only expecting face coords sometimes, + # same will be true of volumes in future. + kwargs = { + "node_x": node_x, + "node_y": node_y, + "edge_x": edge_x, + "edge_y": edge_y, + "face_x": face_x, + "face_y": face_y, + } + kwargs = {k: v for k, v in kwargs.items() if v} + + self._coord_manager.add(**kwargs) def add_connectivities(self, *connectivities): self._connectivity_manager.add(*connectivities) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index b0bcd07997..2d172fe0e6 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -9,6 +9,8 @@ # importing anything else. import iris.tests as tests +import numpy as np + from iris.coords import AuxCoord from iris.experimental import ugrid @@ -40,7 +42,11 @@ ) -class Test1DTopology(tests.IrisTest): +class TestProperties1D(tests.IrisTest): + # Tests that can re-use a single instance for greater efficiency. 
+ + # Mesh kwargs with topology_dimension=1 and all applicable arguments + # populated - this tests correct property setting. KWARGS = { "topology_dimension": 1, "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), @@ -57,6 +63,14 @@ class Test1DTopology(tests.IrisTest): def setUpClass(cls): cls.mesh = ugrid.Mesh(**cls.KWARGS) + def test___getstate__(self): + expected = ( + self.mesh._metadata_manager, + self.mesh._coord_manager, + self.mesh._connectivity_manager, + ) + self.assertEqual(expected, self.mesh.__getstate__()) + def test_all_connectivities(self): expected = ugrid.Mesh1DConnectivities(EDGE_NODE) self.assertEqual(expected, self.mesh.all_connectivities) @@ -74,12 +88,6 @@ def test_edge_dimension(self): self.KWARGS["edge_dimension"], self.mesh.edge_dimension ) - def test_edge_dimension_set(self): - # Don't modify self.mesh, which would prevent re-use. - new_mesh = ugrid.Mesh(**self.KWARGS) - new_mesh.edge_dimension = "foo" - self.assertEqual("foo", new_mesh.edge_dimension) - def test_edge_coords(self): expected = ugrid.MeshEdgeCoords(EDGE_LON, EDGE_LAT) self.assertEqual(expected, self.mesh.edge_coords) @@ -98,14 +106,6 @@ def test_face_coords(self): def test_face_dimension(self): self.assertIsNone(self.mesh.face_dimension) - def test_face_dimension_set(self): - # Don't modify self.mesh, which would prevent re-use. - new_mesh = ugrid.Mesh(**self.KWARGS) - with self.assertLogs(ugrid.logger, level="DEBUG") as log: - new_mesh.face_dimension = "foo" - self.assertIn("Not setting face_dimension", log.output[0]) - self.assertIsNone(new_mesh.face_dimension) - def test_face_edge(self): with self.assertRaises(AttributeError): _ = self.mesh.face_edge_connectivity @@ -127,19 +127,14 @@ def test_node_dimension(self): self.KWARGS["node_dimension"], self.mesh.node_dimension ) - def test_node_dimension_set(self): - # Don't modify self.mesh, which would prevent re-use. - new_mesh = ugrid.Mesh(**self.KWARGS) - new_mesh.node_dimension = "foo" - self.assertEqual("foo", new_mesh.node_dimension) - def test_topology_dimension(self): self.assertEqual( self.KWARGS["topology_dimension"], self.mesh.topology_dimension ) -class Test2DTopology(Test1DTopology): +class TestProperties2D(TestProperties1D): + # Additional/specialised tests for topology_dimension=2. @classmethod def setUpClass(cls): cls.KWARGS["topology_dimension"] = 2 @@ -187,12 +182,6 @@ def test_face_dimension(self): self.KWARGS["face_dimension"], self.mesh.face_dimension ) - def test_face_dimension_set(self): - # Don't modify self.mesh, which would prevent re-use. - new_mesh = ugrid.Mesh(**self.KWARGS) - new_mesh.face_dimension = "foo" - self.assertEqual("foo", new_mesh.face_dimension) - def test_face_edge(self): self.assertEqual(FACE_EDGE, self.mesh.face_edge_connectivity) @@ -201,3 +190,87 @@ def test_face_face(self): def test_face_node(self): self.assertEqual(FACE_NODE, self.mesh.face_node_connectivity) + + +class TestOperations1D(tests.IrisTest): + # Tests that cannot re-use an existing Mesh instance, instead need a new + # one each time. 
+ def setUp(self): + self.mesh = ugrid.Mesh( + topology_dimension=1, + node_coords_and_axes=((NODE_LON, "x"), (NODE_LAT, "y")), + connectivities=EDGE_NODE, + ) + + def test___setstate__(self): + false_metadata_manager = "foo" + false_coord_manager = "bar" + false_connectivity_manager = "baz" + self.mesh.__setstate__( + ( + false_metadata_manager, + false_coord_manager, + false_connectivity_manager, + ) + ) + + self.assertEqual(false_metadata_manager, self.mesh._metadata_manager) + self.assertEqual(false_coord_manager, self.mesh._coord_manager) + self.assertEqual( + false_connectivity_manager, self.mesh._connectivity_manager + ) + + def test_add_coords(self): + # Test coord addition AND replacement. + node_kwargs = { + "node_x": NODE_LON.copy(np.zeros(NODE_LON.shape)), + "node_y": NODE_LAT.copy(np.zeros(NODE_LAT.shape)), + } + edge_kwargs = {"edge_x": EDGE_LON, "edge_y": EDGE_LAT} + self.mesh.add_coords(**node_kwargs, **edge_kwargs) + + self.assertEqual( + ugrid.MeshNodeCoords(**node_kwargs), self.mesh.node_coords + ) + self.assertEqual( + ugrid.MeshEdgeCoords(**edge_kwargs), self.mesh.edge_coords + ) + + def test_add_coords_face(self): + self.assertRaises( + TypeError, self.mesh.add_coords, face_x=FACE_LON, face_y=FACE_LAT + ) + + def test_edge_dimension_set(self): + self.mesh.edge_dimension = "foo" + self.assertEqual("foo", self.mesh.edge_dimension) + + def test_face_dimension_set(self): + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.mesh.face_dimension = "foo" + self.assertIn("Not setting face_dimension", log.output[0]) + self.assertIsNone(self.mesh.face_dimension) + + def test_node_dimension_set(self): + self.mesh.node_dimension = "foo" + self.assertEqual("foo", self.mesh.node_dimension) + + +class TestOperations2D(TestOperations1D): + # Additional/specialised tests for topology_dimension=2. + def setUp(self): + self.mesh = ugrid.Mesh( + topology_dimension=2, + node_coords_and_axes=((NODE_LON, "x"), (NODE_LAT, "y")), + connectivities=(FACE_NODE, EDGE_NODE), + ) + + def test_add_coords_face(self): + kwargs = {"face_x": FACE_LON, "face_y": FACE_LAT} + self.mesh.add_coords(**kwargs) + expected = ugrid.MeshFaceCoords(**kwargs) + self.assertEqual(expected, self.mesh.face_coords) + + def test_face_dimension_set(self): + self.mesh.face_dimension = "foo" + self.assertEqual("foo", self.mesh.face_dimension) From 9c2d9ef5e91cc514aadc8d09b22b1bc51e0420bb Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Wed, 24 Feb 2021 18:26:14 +0000 Subject: [PATCH 14/22] Mesh tests WIP. --- lib/iris/experimental/ugrid.py | 19 +- .../unit/experimental/ugrid/test_Mesh.py | 368 +++++++++++++++++- 2 files changed, 366 insertions(+), 21 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 0a2f108145..f0645bc83b 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -1193,9 +1193,9 @@ def coords( var_name=None, attributes=None, axis=None, - node=False, - edge=False, - face=False, + node=None, + edge=None, + face=None, ): return self._coord_manager.filters( item=item, @@ -1559,12 +1559,15 @@ def filters( ): # TBD: support coord_systems? - # rationalise the tri-state behaviour args = [node, edge, face] - state = not any(set(filter(lambda arg: arg is not None, args))) - node, edge, face = map( - lambda arg: arg if arg is not None else state, args - ) + true_count = args.count(True) + if true_count > 1: + # Standard filter behaviour is 'AND', and coord locations are + # mutually exclusive, so multiple True cannot return any results. 
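+            # For example, Mesh.coords(node=True, edge=True) returns {}.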
+ node = edge = face = False + elif true_count == 0: + # Treat None as True in this case. + node, edge, face = [True if arg is None else arg for arg in args] def populated_coords(coords_tuple): return list(filter(None, list(coords_tuple))) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 2d172fe0e6..3074169e04 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -15,7 +15,13 @@ from iris.experimental import ugrid # A collection of minimal coords and connectivities describing an equilateral triangle. -NODE_LON = AuxCoord([0, 2, 1], standard_name="longitude", var_name="node_lon") +NODE_LON = AuxCoord( + [0, 2, 1], + standard_name="longitude", + long_name="long_name", + var_name="node_lon", + attributes={"test": 1}, +) NODE_LAT = AuxCoord([0, 0, 1], standard_name="latitude", var_name="node_lat") EDGE_LON = AuxCoord( [1, 1.5, 0.5], standard_name="longitude", var_name="edge_lon" @@ -27,7 +33,11 @@ FACE_LAT = AuxCoord([0.5], standard_name="latitude", var_name="face_lat") EDGE_NODE = ugrid.Connectivity( - [[0, 1], [1, 2], [2, 0]], cf_role="edge_node_connectivity" + [[0, 1], [1, 2], [2, 0]], + cf_role="edge_node_connectivity", + long_name="long_name", + var_name="var_name", + attributes={"test": 1}, ) FACE_NODE = ugrid.Connectivity([[0, 1, 2]], cf_role="face_node_connectivity") FACE_EDGE = ugrid.Connectivity([[0, 1, 2]], cf_role="face_edge_connectivity") @@ -83,6 +93,106 @@ def test_boundary_node(self): with self.assertRaises(AttributeError): _ = self.mesh.boundary_node_connectivity + def test_connectivities(self): + # General results. Method intended for inheritance. + positive_kwargs = ( + {"item": EDGE_NODE}, + {"item": "long_name"}, + {"long_name": "long_name"}, + {"var_name": "var_name"}, + {"attributes": {"test": 1}}, + {"cf_role": "edge_node_connectivity"}, + ) + + fake_connectivity = tests.mock.Mock( + __class__=ugrid.Connectivity, cf_role="fake" + ) + negative_kwargs = ( + {"item": fake_connectivity}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + {"cf_role": "foo"}, + ) + + func = self.mesh.connectivities + for kwargs in positive_kwargs: + self.assertEqual( + EDGE_NODE, func(**kwargs)["edge_node_connectivity"] + ) + for kwargs in negative_kwargs: + self.assertNotIn("edge_node_connectivity", func(**kwargs)) + + def test_connectivities_locations(self): + # topology_dimension-specific results. Method intended to be overridden. + expected = {EDGE_NODE.cf_role: EDGE_NODE} + func = self.mesh.connectivities + self.assertEqual(expected, func(node=True)) + self.assertEqual(expected, func(edge=True)) + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.assertEqual({}, func(face=True)) + self.assertIn("filter for non-existent", log.output[0]) + + def test_coords(self): + # General results. Method intended for inheritance. 
+ positive_kwargs = ( + {"item": NODE_LON}, + {"item": "longitude"}, + {"standard_name": "longitude"}, + {"long_name": "long_name"}, + {"var_name": "node_lon"}, + {"attributes": {"test": 1}}, + ) + + fake_coord = AuxCoord([0]) + negative_kwargs = ( + {"item": fake_coord}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + ) + + func = self.mesh.coords + for kwargs in positive_kwargs: + self.assertEqual(NODE_LON, func(**kwargs)["node_x"]) + for kwargs in negative_kwargs: + self.assertNotIn("node_x", func(**kwargs)) + + def test_coords_locations(self): + # topology_dimension-specific results. Method intended to be overridden. + all_expected = { + "node_x": NODE_LON, + "node_y": NODE_LAT, + "edge_x": EDGE_LON, + "edge_y": EDGE_LAT, + } + + kwargs_expected = ( + ({"axis": "x"}, ("node_x", "edge_x")), + ({"axis": "y"}, ("node_y", "edge_y")), + ({"node": True}, ("node_x", "node_y")), + ({"edge": True}, ("edge_x", "edge_y")), + ({"node": False}, ("edge_x", "edge_y")), + ({"edge": False}, ("node_x", "node_y")), + ({"node": True, "edge": True}, []), + ({"node": False, "edge": False}, []), + ) + + func = self.mesh.coords + for kwargs, expected in kwargs_expected: + expected = { + k: all_expected[k] for k in expected if k in all_expected + } + self.assertEqual(expected, func(**kwargs)) + + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.assertEqual({}, func(face=True)) + self.assertIn("filter non-existent", log.output[0]) + def test_edge_dimension(self): self.assertEqual( self.KWARGS["edge_dimension"], self.mesh.edge_dimension @@ -170,6 +280,51 @@ def test_all_coords(self): def test_boundary_node(self): self.assertEqual(BOUNDARY_NODE, self.mesh.boundary_node_connectivity) + def test_connectivities_locations(self): + kwargs_expected = ( + ({"node": True}, (EDGE_NODE, FACE_NODE, BOUNDARY_NODE)), + ({"edge": True}, (EDGE_NODE, FACE_EDGE, EDGE_FACE)), + ({"face": True}, (FACE_NODE, FACE_EDGE, FACE_FACE, EDGE_FACE)), + ({"node": False}, (FACE_EDGE, EDGE_FACE, FACE_FACE)), + ({"edge": False}, (FACE_NODE, BOUNDARY_NODE, FACE_FACE)), + ({"face": False}, (EDGE_NODE, BOUNDARY_NODE)), + ({"edge": True, "face": True}, (FACE_EDGE, EDGE_FACE)), + ({"node": False, "edge": False}, (FACE_FACE,)), + ) + func = self.mesh.connectivities + for kwargs, expected in kwargs_expected: + expected = {c.cf_role: c for c in expected} + self.assertEqual(expected, func(**kwargs)) + + def test_coords_locations(self): + all_expected = { + "node_x": NODE_LON, + "node_y": NODE_LAT, + "edge_x": EDGE_LON, + "edge_y": EDGE_LAT, + "face_x": FACE_LON, + "face_y": FACE_LAT, + } + + kwargs_expected = ( + ({"axis": "x"}, ("node_x", "edge_x", "face_x")), + ({"axis": "y"}, ("node_y", "edge_y", "face_y")), + ({"node": True}, ("node_x", "node_y")), + ({"edge": True}, ("edge_x", "edge_y")), + ({"node": False}, ("edge_x", "edge_y", "face_x", "face_y")), + ({"edge": False}, ("node_x", "node_y", "face_x", "face_y")), + ({"face": False}, ("node_x", "node_y", "edge_x", "edge_y")), + ({"face": True, "edge": True}, []), + ({"face": False, "edge": False}, ["node_x", "node_y"]), + ) + + func = self.mesh.coords + for kwargs, expected in kwargs_expected: + expected = { + k: all_expected[k] for k in expected if k in all_expected + } + self.assertEqual(expected, func(**kwargs)) + def test_edge_face(self): self.assertEqual(EDGE_FACE, self.mesh.edge_face_connectivity) @@ -202,6 +357,25 @@ def setUp(self): connectivities=EDGE_NODE, ) + @staticmethod + def 
new_connectivity(connectivity, new_len=False): + """Provide a new connectivity recognisably different from the original.""" + # NOTE: assumes non-transposed connectivity (src_dim=0). + if new_len: + shape = (connectivity.shape[0] + 1, connectivity.shape[1]) + else: + shape = connectivity.shape + return connectivity.copy(np.zeros(shape, dtype=int)) + + @staticmethod + def new_coord(coord, new_shape=False): + """Provide a new coordinate recognisably different from the original.""" + if new_shape: + shape = tuple([i + 1 for i in coord.shape]) + else: + shape = coord.shape + return coord.copy(np.zeros(shape)) + def test___setstate__(self): false_metadata_manager = "foo" false_coord_manager = "bar" @@ -220,27 +394,101 @@ def test___setstate__(self): false_connectivity_manager, self.mesh._connectivity_manager ) - def test_add_coords(self): - # Test coord addition AND replacement. - node_kwargs = { - "node_x": NODE_LON.copy(np.zeros(NODE_LON.shape)), - "node_y": NODE_LAT.copy(np.zeros(NODE_LAT.shape)), - } - edge_kwargs = {"edge_x": EDGE_LON, "edge_y": EDGE_LAT} - self.mesh.add_coords(**node_kwargs, **edge_kwargs) + def test_add_connectivities(self): + # Cannot test ADD - 1D - nothing extra to add beyond minimum. + + for new_len in (False, True): + # REPLACE connectivities, first with one of the same length, then + # with one of different length. + edge_node = self.new_connectivity(EDGE_NODE, new_len) + self.mesh.add_connectivities(edge_node) + self.assertEqual( + ugrid.Mesh1DConnectivities(edge_node), + self.mesh.all_connectivities, + ) + def test_add_connectivities_duplicates(self): + edge_node_one = EDGE_NODE + edge_node_two = self.new_connectivity(EDGE_NODE) + self.mesh.add_connectivities(edge_node_one, edge_node_two) self.assertEqual( - ugrid.MeshNodeCoords(**node_kwargs), self.mesh.node_coords + edge_node_two, + self.mesh.edge_node_connectivity, ) + + def test_add_connectivities_invalid(self): + face_node = FACE_NODE + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.mesh.add_connectivities(face_node) + self.assertIn("Not adding connectivity", log.output[0]) + + def test_add_coords(self): + # ADD coords. + edge_kwargs = {"edge_x": EDGE_LON, "edge_y": EDGE_LAT} + self.mesh.add_coords(**edge_kwargs) self.assertEqual( ugrid.MeshEdgeCoords(**edge_kwargs), self.mesh.edge_coords ) + for new_shape in (False, True): + # REPLACE coords, first with ones of the same shape, then with ones + # of different shape. + node_kwargs = { + "node_x": self.new_coord(NODE_LON, new_shape), + "node_y": self.new_coord(NODE_LAT, new_shape), + } + edge_kwargs = { + "edge_x": self.new_coord(EDGE_LON, new_shape), + "edge_y": self.new_coord(EDGE_LAT, new_shape), + } + self.mesh.add_coords(**node_kwargs, **edge_kwargs) + self.assertEqual( + ugrid.MeshNodeCoords(**node_kwargs), self.mesh.node_coords + ) + self.assertEqual( + ugrid.MeshEdgeCoords(**edge_kwargs), self.mesh.edge_coords + ) + def test_add_coords_face(self): self.assertRaises( TypeError, self.mesh.add_coords, face_x=FACE_LON, face_y=FACE_LAT ) + def test_add_coords_single(self): + # ADD coord. + edge_x = EDGE_LON + expected = ugrid.MeshEdgeCoords(edge_x=edge_x, edge_y=None) + self.mesh.add_coords(edge_x=edge_x) + self.assertEqual(expected, self.mesh.edge_coords) + + # REPLACE coords. 
+ node_x = self.new_coord(NODE_LON) + edge_x = self.new_coord(EDGE_LON) + expected_nodes = ugrid.MeshNodeCoords( + node_x=node_x, node_y=self.mesh.node_coords.node_y + ) + expected_edges = ugrid.MeshEdgeCoords(edge_x=edge_x, edge_y=None) + self.mesh.add_coords(node_x=node_x, edge_x=edge_x) + self.assertEqual(expected_nodes, self.mesh.node_coords) + self.assertEqual(expected_edges, self.mesh.edge_coords) + + # Attempt to REPLACE coords with those of DIFFERENT SHAPE. + node_x = self.new_coord(NODE_LON, new_shape=True) + edge_x = self.new_coord(EDGE_LON, new_shape=True) + node_kwarg = {"node_x": node_x} + edge_kwarg = {"edge_x": edge_x} + both_kwargs = dict(**node_kwarg, **edge_kwarg) + for kwargs in (node_kwarg, edge_kwarg, both_kwargs): + self.assertRaisesRegex( + ValueError, + ".*requires to have shape.*", + self.mesh.add_coords, + **kwargs, + ) + + def test_add_coords_single_face(self): + self.assertRaises(TypeError, self.mesh.add_coords, face_x=FACE_LON) + def test_edge_dimension_set(self): self.mesh.edge_dimension = "foo" self.assertEqual("foo", self.mesh.edge_dimension) @@ -262,15 +510,109 @@ def setUp(self): self.mesh = ugrid.Mesh( topology_dimension=2, node_coords_and_axes=((NODE_LON, "x"), (NODE_LAT, "y")), - connectivities=(FACE_NODE, EDGE_NODE), + connectivities=(FACE_NODE), + ) + + def test_add_connectivities(self): + # ADD connectivities. + kwargs = { + "edge_node": EDGE_NODE, + "face_edge": FACE_EDGE, + "face_face": FACE_FACE, + "edge_face": EDGE_FACE, + "boundary_node": BOUNDARY_NODE, + } + expected = ugrid.Mesh2DConnectivities( + face_node=self.mesh.face_node_connectivity, **kwargs ) + self.mesh.add_connectivities(*kwargs.values()) + self.assertEqual(expected, self.mesh.all_connectivities) + + # REPLACE connectivities. + kwargs["face_node"] = FACE_NODE + for new_len in (False, True): + # First replace with ones of same length, then with ones of + # different length. + kwargs = { + k: self.new_connectivity(v, new_len) for k, v in kwargs.items() + } + self.mesh.add_connectivities(*kwargs.values()) + self.assertEqual( + ugrid.Mesh2DConnectivities(**kwargs), + self.mesh.all_connectivities, + ) + + def test_add_connectivities_inconsistent(self): + # ADD Connectivities. + self.mesh.add_connectivities(EDGE_NODE) + face_edge = self.new_connectivity(FACE_EDGE, new_len=True) + edge_face = self.new_connectivity(EDGE_FACE, new_len=True) + for args in ([face_edge], [edge_face], [face_edge, edge_face]): + self.assertRaisesRegex( + ValueError, + "inconsistent .* counts.", + self.mesh.add_connectivities, + *args, + ) + + # REPLACE Connectivities + self.mesh.add_connectivities(FACE_EDGE, EDGE_FACE) + for args in ([face_edge], [edge_face], [face_edge, edge_face]): + self.assertRaisesRegex( + ValueError, + "inconsistent .* counts.", + self.mesh.add_connectivities, + *args, + ) + + def test_add_connectivities_invalid(self): + fake_cf_role = tests.mock.Mock( + __class__=ugrid.Connectivity, cf_role="foo" + ) + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.mesh.add_connectivities(fake_cf_role) + self.assertIn("Not adding connectivity", log.output[0]) def test_add_coords_face(self): + # ADD coords. kwargs = {"face_x": FACE_LON, "face_y": FACE_LAT} self.mesh.add_coords(**kwargs) - expected = ugrid.MeshFaceCoords(**kwargs) + self.assertEqual(ugrid.MeshFaceCoords(**kwargs), self.mesh.face_coords) + + for new_shape in (False, True): + # REPLACE coords, first with ones of the same shape, then with ones + # of different shape. 
+ kwargs = { + "face_x": self.new_coord(FACE_LON, new_shape), + "face_y": self.new_coord(FACE_LAT, new_shape), + } + self.mesh.add_coords(**kwargs) + self.assertEqual( + ugrid.MeshFaceCoords(**kwargs), self.mesh.face_coords + ) + + def test_add_coords_single_face(self): + # ADD coord. + face_x = FACE_LON + expected = ugrid.MeshFaceCoords(face_x=face_x, face_y=None) + self.mesh.add_coords(face_x=face_x) self.assertEqual(expected, self.mesh.face_coords) + # REPLACE coord. + face_x = self.new_coord(FACE_LON) + expected = ugrid.MeshFaceCoords(face_x=face_x, face_y=None) + self.mesh.add_coords(face_x=face_x) + self.assertEqual(expected, self.mesh.face_coords) + + # Attempt to REPLACE coord with that of DIFFERENT SHAPE. + face_x = self.new_coord(FACE_LON, new_shape=True) + self.assertRaisesRegex( + ValueError, + ".*requires to have shape.*", + self.mesh.add_coords, + face_x=face_x, + ) + def test_face_dimension_set(self): self.mesh.face_dimension = "foo" self.assertEqual("foo", self.mesh.face_dimension) From 583ae4a084959f88cd358363544f299f6dc72645 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 02:12:41 +0000 Subject: [PATCH 15/22] Mesh tests WIP. --- lib/iris/experimental/ugrid.py | 30 ++- .../unit/experimental/ugrid/test_Mesh.py | 234 +++++++++++++++++- 2 files changed, 250 insertions(+), 14 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index f0645bc83b..ad5f291128 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -1245,17 +1245,22 @@ def remove_coords( edge=None, face=None, ): - return self._coord_manager.remove( - item=item, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - attributes=attributes, - axis=axis, - node=node, - edge=edge, - face=face, - ) + # Filter out absent arguments - only expecting face coords sometimes, + # same will be true of volumes in future. + kwargs = { + "item": item, + "standard_name": standard_name, + "long_name": long_name, + "var_name": var_name, + "attributes": attributes, + "axis": axis, + "node": node, + "edge": edge, + "face": face, + } + kwargs = {k: v for k, v in kwargs.items() if v} + + return self._coord_manager.remove(**kwargs) def xml_element(self): # TBD @@ -1559,6 +1564,7 @@ def filters( ): # TBD: support coord_systems? + face_requested = face is True args = [node, edge, face] true_count = args.count(True) if true_count > 1: @@ -1580,7 +1586,7 @@ def populated_coords(coords_tuple): if hasattr(self, "face_coords"): if face: members += populated_coords(self.face_coords) - else: + elif face_requested: dmsg = "Ignoring request to filter non-existent 'face_coords'" logger.debug(dmsg, extra=dict(cls=self.__class__.__name__)) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 3074169e04..8e36011661 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -73,6 +73,12 @@ class TestProperties1D(tests.IrisTest): def setUpClass(cls): cls.mesh = ugrid.Mesh(**cls.KWARGS) + def test__metadata_manager(self): + self.assertEqual( + self.mesh._metadata_manager.cls.__name__, + ugrid.MeshMetadata.__name__, + ) + def test___getstate__(self): expected = ( self.mesh._metadata_manager, @@ -93,6 +99,11 @@ def test_boundary_node(self): with self.assertRaises(AttributeError): _ = self.mesh.boundary_node_connectivity + def test_cf_role(self): + self.assertEqual("mesh_topology", self.mesh.cf_role) + # Read only. 
+ self.assertRaises(AttributeError, setattr, self.mesh.cf_role, "foo", 1) + def test_connectivities(self): # General results. Method intended for inheritance. positive_kwargs = ( @@ -127,10 +138,25 @@ def test_connectivities(self): def test_connectivities_locations(self): # topology_dimension-specific results. Method intended to be overridden. + positive_kwargs = ( + {"node": True}, + {"edge": True}, + {"node": True, "edge": True}, + ) + negative_kwargs = ( + {"node": False}, + {"edge": False}, + {"edge": True, "node": False}, + {"edge": False, "node": False}, + ) + expected = {EDGE_NODE.cf_role: EDGE_NODE} func = self.mesh.connectivities - self.assertEqual(expected, func(node=True)) - self.assertEqual(expected, func(edge=True)) + for kwargs in positive_kwargs: + self.assertEqual(expected, func(**kwargs)) + for kwargs in negative_kwargs: + self.assertEqual({}, func(**kwargs)) + with self.assertLogs(ugrid.logger, level="DEBUG") as log: self.assertEqual({}, func(face=True)) self.assertIn("filter for non-existent", log.output[0]) @@ -180,6 +206,7 @@ def test_coords_locations(self): ({"edge": False}, ("node_x", "node_y")), ({"node": True, "edge": True}, []), ({"node": False, "edge": False}, []), + ({"node": False, "edge": True}, ("edge_x", "edge_y")), ) func = self.mesh.coords @@ -241,6 +268,10 @@ def test_topology_dimension(self): self.assertEqual( self.KWARGS["topology_dimension"], self.mesh.topology_dimension ) + # Read only. + self.assertRaises( + AttributeError, setattr, self.mesh.topology_dimension, "foo", 1 + ) class TestProperties2D(TestProperties1D): @@ -290,6 +321,8 @@ def test_connectivities_locations(self): ({"face": False}, (EDGE_NODE, BOUNDARY_NODE)), ({"edge": True, "face": True}, (FACE_EDGE, EDGE_FACE)), ({"node": False, "edge": False}, (FACE_FACE,)), + ({"node": True, "edge": False}, (FACE_NODE, BOUNDARY_NODE)), + ({"node": False, "edge": False, "face": False}, []), ) func = self.mesh.connectivities for kwargs, expected in kwargs_expected: @@ -316,6 +349,7 @@ def test_coords_locations(self): ({"face": False}, ("node_x", "node_y", "edge_x", "edge_y")), ({"face": True, "edge": True}, []), ({"face": False, "edge": False}, ["node_x", "node_y"]), + ({"face": False, "edge": True}, ["edge_x", "edge_y"]), ) func = self.mesh.coords @@ -489,6 +523,27 @@ def test_add_coords_single(self): def test_add_coords_single_face(self): self.assertRaises(TypeError, self.mesh.add_coords, face_x=FACE_LON) + def test_dimension_names(self): + # Test defaults. + default = ugrid.Mesh1DNames("Mesh1d_node", "Mesh1d_edge") + self.assertEqual(default, self.mesh.dimension_names()) + + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.mesh.dimension_names("foo", "bar", "baz") + self.assertIn("Not setting face_dimension", log.output[0]) + self.assertEqual( + ugrid.Mesh1DNames("foo", "bar"), self.mesh.dimension_names() + ) + + self.mesh.dimension_names_reset(True, True, True) + self.assertEqual(default, self.mesh.dimension_names()) + + # Single. 
+ self.mesh.dimension_names(edge="foo") + self.assertEqual("foo", self.mesh.edge_dimension) + self.mesh.dimension_names_reset(edge=True) + self.assertEqual(default, self.mesh.dimension_names()) + def test_edge_dimension_set(self): self.mesh.edge_dimension = "foo" self.assertEqual("foo", self.mesh.edge_dimension) @@ -503,6 +558,99 @@ def test_node_dimension_set(self): self.mesh.node_dimension = "foo" self.assertEqual("foo", self.mesh.node_dimension) + def test_remove_connectivities(self): + """ + Test that remove() mimics the connectivities() method correctly, + and prevents removal of mandatory connectivities. + + """ + positive_kwargs = ( + {"item": EDGE_NODE}, + {"item": "long_name"}, + {"long_name": "long_name"}, + {"var_name": "var_name"}, + {"attributes": {"test": 1}}, + {"cf_role": "edge_node_connectivity"}, + {"node": True}, + {"edge": True}, + {"edge": True, "node": True}, + ) + + fake_connectivity = tests.mock.Mock( + __class__=ugrid.Connectivity, cf_role="fake" + ) + negative_kwargs = ( + {"item": fake_connectivity}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + {"cf_role": "foo"}, + {"node": False}, + {"edge": False}, + {"edge": True, "node": False}, + {"edge": False, "node": False}, + ) + + for kwargs in positive_kwargs: + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.mesh.remove_connectivities(**kwargs) + self.assertIn("Ignoring request to remove", log.output[0]) + self.assertEqual(EDGE_NODE, self.mesh.edge_node_connectivity) + for kwargs in negative_kwargs: + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + # Check that the only debug log is the one we inserted. + ugrid.logger.debug("foo") + self.mesh.remove_connectivities(**kwargs) + self.assertEqual(1, len(log.output)) + self.assertEqual(EDGE_NODE, self.mesh.edge_node_connectivity) + + def test_remove_coords(self): + # Test that remove() mimics the coords() method correctly, + # and prevents removal of mandatory coords. + positive_kwargs = ( + {"item": NODE_LON}, + {"item": "longitude"}, + {"standard_name": "longitude"}, + {"long_name": "long_name"}, + {"var_name": "node_lon"}, + {"attributes": {"test": 1}}, + ) + + fake_coord = AuxCoord([0]) + negative_kwargs = ( + {"item": fake_coord}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + ) + + for kwargs in positive_kwargs: + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + self.mesh.remove_coords(**kwargs) + self.assertIn("Ignoring request to remove", log.output[0]) + self.assertEqual(NODE_LON, self.mesh.node_coords.node_x) + for kwargs in negative_kwargs: + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + # Check that the only debug log is the one we inserted. + ugrid.logger.debug("foo") + self.mesh.remove_coords(**kwargs) + self.assertEqual(1, len(log.output)) + self.assertEqual(NODE_LON, self.mesh.node_coords.node_x) + + # Test removal of optional connectivity. + self.mesh.add_coords(edge_x=EDGE_LON) + # Attempt to remove a non-existent coord. + self.mesh.remove_coords(EDGE_LAT) + # Confirm that EDGE_LON is still there. + self.assertEqual(EDGE_LON, self.mesh.edge_coords.edge_x) + # Remove EDGE_LON and confirm success. + self.mesh.remove_coords(EDGE_LON) + self.assertEqual(None, self.mesh.edge_coords.edge_x) + class TestOperations2D(TestOperations1D): # Additional/specialised tests for topology_dimension=2. 
@@ -613,6 +761,88 @@ def test_add_coords_single_face(self): face_x=face_x, ) + def test_dimension_names(self): + # Test defaults. + default = ugrid.Mesh2DNames( + "Mesh2d_node", "Mesh2d_edge", "Mesh2d_face" + ) + self.assertEqual(default, self.mesh.dimension_names()) + + self.mesh.dimension_names("foo", "bar", "baz") + self.assertEqual( + ugrid.Mesh2DNames("foo", "bar", "baz"), self.mesh.dimension_names() + ) + + self.mesh.dimension_names_reset(True, True, True) + self.assertEqual(default, self.mesh.dimension_names()) + + # Single. + self.mesh.dimension_names(face="foo") + self.assertEqual("foo", self.mesh.face_dimension) + self.mesh.dimension_names_reset(face=True) + self.assertEqual(default, self.mesh.dimension_names()) + def test_face_dimension_set(self): self.mesh.face_dimension = "foo" self.assertEqual("foo", self.mesh.face_dimension) + + def test_remove_connectivities(self): + """Do what 1D test could not - test removal of optional connectivity.""" + + # Add an optional connectivity. + self.mesh.add_connectivities(FACE_FACE) + # Attempt to remove a non-existent connectivity. + self.mesh.remove_connectivities(EDGE_NODE) + # Confirm that FACE_FACE is still there. + self.assertEqual(FACE_FACE, self.mesh.face_face_connectivity) + # Remove FACE_FACE and confirm success. + self.mesh.remove_connectivities(face=True) + self.assertEqual(None, self.mesh.face_face_connectivity) + + def test_remove_coords(self): + """Test the face argument.""" + super().test_remove_coords() + self.mesh.add_coords(face_x=FACE_LON) + self.assertEqual(FACE_LON, self.mesh.face_coords.face_x) + self.mesh.remove_coords(face=True) + self.assertEqual(None, self.mesh.face_coords.face_x) + + +class InitValidation(tests.IrisTest): + def test_invalid_topology(self): + kwargs = { + "topology_dimension": 0, + "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), + "connectivities": EDGE_NODE, + } + self.assertRaisesRegex( + ValueError, "Expected 'topology_dimension'.*", ugrid.Mesh, **kwargs + ) + + def test_invalid_axes(self): + kwargs = { + "topology_dimension": 2, + "connectivities": FACE_NODE, + } + self.assertRaisesRegex( + ValueError, + "Invalid axis specified for node.*", + ugrid.Mesh, + node_coords_and_axes=((NODE_LON, "foo"), (NODE_LAT, "y")), + **kwargs, + ) + kwargs["node_coords_and_axes"] = (((NODE_LON, "x"), (NODE_LAT, "y")),) + self.assertRaisesRegex( + ValueError, + "Invalid axis specified for edge.*", + ugrid.Mesh, + edge_coords_and_axes=((EDGE_LON, "foo"), (EDGE_LAT, "y")), + **kwargs, + ) + self.assertRaisesRegex( + ValueError, + "Invalid axis specified for face.*", + ugrid.Mesh, + face_coords_and_axes=((FACE_LON, "foo"), (FACE_LAT, "y")), + **kwargs, + ) From a1cb54dde6f5402b4ff2d768b94f568d6e7b13a2 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 07:42:19 +0000 Subject: [PATCH 16/22] Mesh tests complete. --- lib/iris/experimental/ugrid.py | 8 +- .../unit/experimental/ugrid/test_Mesh.py | 89 ++++++++++++++++++- 2 files changed, 89 insertions(+), 8 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index ad5f291128..1cb4d6b297 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -1566,7 +1566,7 @@ def filters( face_requested = face is True args = [node, edge, face] - true_count = args.count(True) + true_count = len([arg for arg in args if arg]) if true_count > 1: # Standard filter behaviour is 'AND', and coord locations are # mutually exclusive, so multiple True cannot return any results. 
@@ -1731,9 +1731,7 @@ def __init__(self, *connectivities): cf_roles = [c.cf_role for c in connectivities] for requisite in self.REQUIRED: if requisite not in cf_roles: - message = ( - f"{self.__name__} requires a {requisite} Connectivity." - ) + message = f"{type(self).__name__} requires a {requisite} Connectivity." raise ValueError(message) self.ALL = self.REQUIRED + self.OPTIONAL @@ -1789,7 +1787,7 @@ def add(self, *connectivities): for connectivity in connectivities: if not isinstance(connectivity, Connectivity): message = f"Expected Connectivity, got: {type(connectivity)} ." - raise ValueError(message) + raise TypeError(message) cf_role = connectivity.cf_role if cf_role not in self.ALL: message = ( diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 8e36011661..94cc8f2329 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -12,6 +12,7 @@ import numpy as np from iris.coords import AuxCoord +from iris.exceptions import ConnectivityNotFoundError, CoordinateNotFoundError from iris.experimental import ugrid # A collection of minimal coords and connectivities describing an equilateral triangle. @@ -161,6 +162,13 @@ def test_connectivities_locations(self): self.assertEqual({}, func(face=True)) self.assertIn("filter for non-existent", log.output[0]) + def test_coord(self): + # See Mesh.coords tests for thorough coverage of cases. + func = self.mesh.coord + exception = CoordinateNotFoundError + self.assertRaisesRegex(exception, ".*but found 2", func, node=True) + self.assertRaisesRegex(exception, ".*but found none", func, axis="t") + def test_coords(self): # General results. Method intended for inheritance. positive_kwargs = ( @@ -311,6 +319,21 @@ def test_all_coords(self): def test_boundary_node(self): self.assertEqual(BOUNDARY_NODE, self.mesh.boundary_node_connectivity) + def test_connectivity(self): + # See Mesh.connectivities tests for thorough coverage of cases. + # Can only test Mesh.connectivity for 2D since we need >1 connectivity. 
+ func = self.mesh.connectivity + exception = ConnectivityNotFoundError + self.assertRaisesRegex(exception, ".*but found 3", func, node=True) + self.assertRaisesRegex( + exception, + ".*but found none", + func, + node=False, + edge=False, + face=False, + ) + def test_connectivities_locations(self): kwargs_expected = ( ({"node": True}, (EDGE_NODE, FACE_NODE, BOUNDARY_NODE)), @@ -451,6 +474,13 @@ def test_add_connectivities_duplicates(self): ) def test_add_connectivities_invalid(self): + self.assertRaisesRegex( + TypeError, + "Expected Connectivity.*", + self.mesh.add_connectivities, + "foo", + ) + face_node = FACE_NODE with self.assertLogs(ugrid.logger, level="DEBUG") as log: self.mesh.add_connectivities(face_node) @@ -488,6 +518,32 @@ def test_add_coords_face(self): TypeError, self.mesh.add_coords, face_x=FACE_LON, face_y=FACE_LAT ) + def test_add_coords_invalid(self): + func = self.mesh.add_coords + self.assertRaisesRegex( + TypeError, ".*requires to be an 'AuxCoord'.*", func, node_x="foo" + ) + self.assertRaisesRegex( + TypeError, ".*requires a x-axis like.*", func, node_x=NODE_LAT + ) + climatological = AuxCoord( + [0], + bounds=[-1, 1], + standard_name="longitude", + climatological=True, + units="Days since 1970", + ) + self.assertRaisesRegex( + TypeError, + ".*cannot be a climatological.*", + func, + node_x=climatological, + ) + wrong_shape = NODE_LON.copy([0]) + self.assertRaisesRegex( + ValueError, ".*requires to have shape.*", func, node_x=wrong_shape + ) + def test_add_coords_single(self): # ADD coord. edge_x = EDGE_LON @@ -831,18 +887,45 @@ def test_invalid_axes(self): node_coords_and_axes=((NODE_LON, "foo"), (NODE_LAT, "y")), **kwargs, ) - kwargs["node_coords_and_axes"] = (((NODE_LON, "x"), (NODE_LAT, "y")),) + kwargs["node_coords_and_axes"] = ((NODE_LON, "x"), (NODE_LAT, "y")) self.assertRaisesRegex( ValueError, "Invalid axis specified for edge.*", ugrid.Mesh, - edge_coords_and_axes=((EDGE_LON, "foo"), (EDGE_LAT, "y")), + edge_coords_and_axes=((EDGE_LON, "foo"),), **kwargs, ) self.assertRaisesRegex( ValueError, "Invalid axis specified for face.*", ugrid.Mesh, - face_coords_and_axes=((FACE_LON, "foo"), (FACE_LAT, "y")), + face_coords_and_axes=((FACE_LON, "foo"),), + **kwargs, + ) + + # Several arg safety checks in __init__ currently unreachable given earlier checks. + + def test_minimum_connectivities(self): + # Further validations are tested in add_connectivity tests. + kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), + "connectivities": (FACE_NODE,), + } + self.assertRaisesRegex( + ValueError, + ".*requires a edge_node_connectivity.*", + ugrid.Mesh, **kwargs, ) + + def test_minimum_coords(self): + # Further validations are tested in add_coord tests. + kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ((NODE_LON, "x"), (None, "y")), + "connectivities": (FACE_NODE,), + } + self.assertRaisesRegex( + ValueError, ".*is a required coordinate.*", ugrid.Mesh, **kwargs + ) From 9217a5985a93dac2f447a2a69a16d641f7d7ac7d Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 08:32:50 +0000 Subject: [PATCH 17/22] Mesh repr tests. 
--- .../unit/experimental/ugrid/test_Mesh.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 94cc8f2329..6e3a831bd2 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -88,6 +88,25 @@ def test___getstate__(self): ) self.assertEqual(expected, self.mesh.__getstate__()) + def test___repr__(self): + expected = ( + "Mesh(topology_dimension=1, node_coords_and_axes=[(AuxCoord(" + "array([0, 2, 1]), standard_name='longitude', units=Unit(" + "'unknown'), long_name='long_name', var_name='node_lon', " + "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='node_lat'), 'y')], connectivities=Connectivity(" + "cf_role='edge_node_connectivity', start_index=0), " + "edge_coords_and_axes=[(AuxCoord(array([1. , 1.5, 0.5]), " + "standard_name='longitude', units=Unit('unknown'), " + "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='edge_lat'), 'y')], long_name='my_topology_mesh', " + "var_name='mesh', attributes={'notes': 'this is a test'}, " + "node_dimension='NodeDim', edge_dimension='EdgeDim')" + ) + self.assertEqual(expected, self.mesh.__repr__()) + def test_all_connectivities(self): expected = ugrid.Mesh1DConnectivities(EDGE_NODE) self.assertEqual(expected, self.mesh.all_connectivities) @@ -299,6 +318,35 @@ def setUpClass(cls): cls.KWARGS["face_coords_and_axes"] = ((FACE_LON, "x"), (FACE_LAT, "y")) super().setUpClass() + def test___repr__(self): + expected = ( + "Mesh(topology_dimension=2, node_coords_and_axes=[(AuxCoord(" + "array([0, 2, 1]), standard_name='longitude', units=Unit(" + "'unknown'), long_name='long_name', var_name='node_lon', " + "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='node_lat'), 'y')], connectivities=[Connectivity(" + "cf_role='face_node_connectivity', start_index=0), Connectivity(" + "cf_role='edge_node_connectivity', start_index=0), Connectivity(" + "cf_role='face_edge_connectivity', start_index=0), Connectivity(" + "cf_role='face_face_connectivity', start_index=0), Connectivity(" + "cf_role='edge_face_connectivity', start_index=0), Connectivity(" + "cf_role='boundary_node_connectivity', start_index=0)], " + "edge_coords_and_axes=[(AuxCoord(array([1. , 1.5, 0.5]), " + "standard_name='longitude', units=Unit('unknown'), " + "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='edge_lat'), 'y')], face_coords_and_axes=[(AuxCoord(" + "array([0.5]), standard_name='longitude', units=Unit('unknown'), " + "var_name='face_lon'), 'x'), (AuxCoord(array([0.5]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='face_lat'), 'y')], long_name='my_topology_mesh', " + "var_name='mesh', attributes={'notes': 'this is a test'}, " + "node_dimension='NodeDim', edge_dimension='EdgeDim', " + "face_dimension='FaceDim')" + ) + self.assertEqual(expected, self.mesh.__repr__()) + def test_all_connectivities(self): expected = ugrid.Mesh2DConnectivities( FACE_NODE, From efdf79a75b55ca5ceda0f1e707d8f833eccebf69 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 08:55:40 +0000 Subject: [PATCH 18/22] experimental.ugrid restore class ordering. 
--- lib/iris/experimental/ugrid.py | 236 ++++++++++++++++----------------- 1 file changed, 118 insertions(+), 118 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index e0339b9af2..f95c4451af 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -848,124 +848,6 @@ def equal(self, other, lenient=None): return super().equal(other, lenient=lenient) -class MeshCoordMetadata(BaseMetadata): - """ - Metadata container for a :class:`~iris.coords.MeshCoord`. - """ - - _members = ("location", "axis") - # NOTE: in future, we may add 'mesh' as part of this metadata, - # as the Mesh seems part of the 'identity' of a MeshCoord. - # For now we omit it, particularly as we don't yet implement Mesh.__eq__. - # - # Thus, for now, the MeshCoord class will need to handle 'mesh' explicitly - # in identity / comparison, but in future that may be simplified. - - __slots__ = () - - @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) - @lenient_service - def __eq__(self, other): - return super().__eq__(other) - - def _combine_lenient(self, other): - """ - Perform lenient combination of metadata members for MeshCoord. - - Args: - - * other (MeshCoordMetadata): - The other metadata participating in the lenient combination. - - Returns: - A list of combined metadata member values. - - """ - # It is actually "strict" : return None except where members are equal. - def func(field): - left = getattr(self, field) - right = getattr(other, field) - return left if left == right else None - - # Note that, we use "_members" not "_fields". - values = [func(field) for field in self._members] - # Perform lenient combination of the other parent members. - result = super()._combine_lenient(other) - result.extend(values) - - return result - - def _compare_lenient(self, other): - """ - Perform lenient equality of metadata members for MeshCoord. - - Args: - - * other (MeshCoordMetadata): - The other metadata participating in the lenient comparison. - - Returns: - Boolean. - - """ - # Perform "strict" comparison for the MeshCoord specific members - # 'location', 'axis' : for equality, they must all match. - result = all( - [ - getattr(self, field) == getattr(other, field) - for field in self._members - ] - ) - if result: - # Perform lenient comparison of the other parent members. - result = super()._compare_lenient(other) - - return result - - def _difference_lenient(self, other): - """ - Perform lenient difference of metadata members for MeshCoord. - - Args: - - * other (MeshCoordMetadata): - The other MeshCoord metadata participating in the lenient - difference. - - Returns: - A list of different metadata member values. - - """ - # Perform "strict" difference for location / axis. - def func(field): - left = getattr(self, field) - right = getattr(other, field) - return None if left == right else (left, right) - - # Note that, we use "_members" not "_fields". - values = [func(field) for field in self._members] - # Perform lenient difference of the other parent members. 
- result = super()._difference_lenient(other) - result.extend(values) - - return result - - @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) - @lenient_service - def combine(self, other, lenient=None): - return super().combine(other, lenient=lenient) - - @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) - @lenient_service - def difference(self, other, lenient=None): - return super().difference(other, lenient=lenient) - - @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) - @lenient_service - def equal(self, other, lenient=None): - return super().equal(other, lenient=lenient) - - class Mesh(CFVariableMixin): """ @@ -2266,6 +2148,124 @@ def face_node(self): return self._members["face_node_connectivity"] +class MeshCoordMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.coords.MeshCoord`. + """ + + _members = ("location", "axis") + # NOTE: in future, we may add 'mesh' as part of this metadata, + # as the Mesh seems part of the 'identity' of a MeshCoord. + # For now we omit it, particularly as we don't yet implement Mesh.__eq__. + # + # Thus, for now, the MeshCoord class will need to handle 'mesh' explicitly + # in identity / comparison, but in future that may be simplified. + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for MeshCoord. + + Args: + + * other (MeshCoordMetadata): + The other metadata participating in the lenient combination. + + Returns: + A list of combined metadata member values. + + """ + # It is actually "strict" : return None except where members are equal. + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return left if left == right else None + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in self._members] + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.extend(values) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for MeshCoord. + + Args: + + * other (MeshCoordMetadata): + The other metadata participating in the lenient comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for the MeshCoord specific members + # 'location', 'axis' : for equality, they must all match. + result = all( + [ + getattr(self, field) == getattr(other, field) + for field in self._members + ] + ) + if result: + # Perform lenient comparison of the other parent members. + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for MeshCoord. + + Args: + + * other (MeshCoordMetadata): + The other MeshCoord metadata participating in the lenient + difference. + + Returns: + A list of different metadata member values. + + """ + # Perform "strict" difference for location / axis. + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return None if left == right else (left, right) + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in self._members] + # Perform lenient difference of the other parent members. 
+ result = super()._difference_lenient(other) + result.extend(values) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + # Add our new optional metadata operations into the 'convenience collections' # of lenient metadata services. # TODO: when included in 'iris.common.metadata', install each one directly ? From 6837a6b390915f3b1f1c8746ddc0fc0c89051ef8 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 11:31:56 +0000 Subject: [PATCH 19/22] Mesh tests - move global and class variables into setUpClass methods, to play nicely with unittest. --- .../unit/experimental/ugrid/test_Mesh.py | 422 ++++++++++-------- 1 file changed, 245 insertions(+), 177 deletions(-) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 6e3a831bd2..69d3857d4f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -15,64 +15,82 @@ from iris.exceptions import ConnectivityNotFoundError, CoordinateNotFoundError from iris.experimental import ugrid -# A collection of minimal coords and connectivities describing an equilateral triangle. -NODE_LON = AuxCoord( - [0, 2, 1], - standard_name="longitude", - long_name="long_name", - var_name="node_lon", - attributes={"test": 1}, -) -NODE_LAT = AuxCoord([0, 0, 1], standard_name="latitude", var_name="node_lat") -EDGE_LON = AuxCoord( - [1, 1.5, 0.5], standard_name="longitude", var_name="edge_lon" -) -EDGE_LAT = AuxCoord( - [0, 0.5, 0.5], standard_name="latitude", var_name="edge_lat" -) -FACE_LON = AuxCoord([0.5], standard_name="longitude", var_name="face_lon") -FACE_LAT = AuxCoord([0.5], standard_name="latitude", var_name="face_lat") - -EDGE_NODE = ugrid.Connectivity( - [[0, 1], [1, 2], [2, 0]], - cf_role="edge_node_connectivity", - long_name="long_name", - var_name="var_name", - attributes={"test": 1}, -) -FACE_NODE = ugrid.Connectivity([[0, 1, 2]], cf_role="face_node_connectivity") -FACE_EDGE = ugrid.Connectivity([[0, 1, 2]], cf_role="face_edge_connectivity") -# Actually meaningless: -FACE_FACE = ugrid.Connectivity([[0, 0, 0]], cf_role="face_face_connectivity") -# Actually meaningless: -EDGE_FACE = ugrid.Connectivity( - [[0, 0], [0, 0], [0, 0]], cf_role="edge_face_connectivity" -) -BOUNDARY_NODE = ugrid.Connectivity( - [[0, 1], [1, 2], [2, 0]], cf_role="boundary_node_connectivity" -) - - -class TestProperties1D(tests.IrisTest): - # Tests that can re-use a single instance for greater efficiency. - # Mesh kwargs with topology_dimension=1 and all applicable arguments - # populated - this tests correct property setting. 
- KWARGS = { - "topology_dimension": 1, - "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), - "connectivities": EDGE_NODE, - "long_name": "my_topology_mesh", - "var_name": "mesh", - "attributes": {"notes": "this is a test"}, - "node_dimension": "NodeDim", - "edge_dimension": "EdgeDim", - "edge_coords_and_axes": ((EDGE_LON, "x"), (EDGE_LAT, "y")), - } +class TestMeshCommon(tests.IrisTest): + @classmethod + def setUpClass(cls): + # A collection of minimal coords and connectivities describing an equilateral triangle. + # Re-used in most/all of the test classes, hence globals. + # global NODE_LON, NODE_LAT, EDGE_LON, EDGE_LAT, FACE_LON, FACE_LAT, EDGE_NODE, FACE_NODE, FACE_EDGE, FACE_FACE, EDGE_FACE, BOUNDARY_NODE + + cls.NODE_LON = AuxCoord( + [0, 2, 1], + standard_name="longitude", + long_name="long_name", + var_name="node_lon", + attributes={"test": 1}, + ) + cls.NODE_LAT = AuxCoord( + [0, 0, 1], standard_name="latitude", var_name="node_lat" + ) + cls.EDGE_LON = AuxCoord( + [1, 1.5, 0.5], standard_name="longitude", var_name="edge_lon" + ) + cls.EDGE_LAT = AuxCoord( + [0, 0.5, 0.5], standard_name="latitude", var_name="edge_lat" + ) + cls.FACE_LON = AuxCoord( + [0.5], standard_name="longitude", var_name="face_lon" + ) + cls.FACE_LAT = AuxCoord( + [0.5], standard_name="latitude", var_name="face_lat" + ) + + cls.EDGE_NODE = ugrid.Connectivity( + [[0, 1], [1, 2], [2, 0]], + cf_role="edge_node_connectivity", + long_name="long_name", + var_name="var_name", + attributes={"test": 1}, + ) + cls.FACE_NODE = ugrid.Connectivity( + [[0, 1, 2]], cf_role="face_node_connectivity" + ) + cls.FACE_EDGE = ugrid.Connectivity( + [[0, 1, 2]], cf_role="face_edge_connectivity" + ) + # (Actually meaningless:) + cls.FACE_FACE = ugrid.Connectivity( + [[0, 0, 0]], cf_role="face_face_connectivity" + ) + # (Actually meaningless:) + cls.EDGE_FACE = ugrid.Connectivity( + [[0, 0], [0, 0], [0, 0]], cf_role="edge_face_connectivity" + ) + cls.BOUNDARY_NODE = ugrid.Connectivity( + [[0, 1], [1, 2], [2, 0]], cf_role="boundary_node_connectivity" + ) + +class TestProperties1D(TestMeshCommon): + # Tests that can re-use a single instance for greater efficiency. @classmethod def setUpClass(cls): - cls.mesh = ugrid.Mesh(**cls.KWARGS) + super().setUpClass() + # Mesh kwargs with topology_dimension=1 and all applicable + # arguments populated - this tests correct property setting. + cls.kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ((cls.NODE_LON, "x"), (cls.NODE_LAT, "y")), + "connectivities": cls.EDGE_NODE, + "long_name": "my_topology_mesh", + "var_name": "mesh", + "attributes": {"notes": "this is a test"}, + "node_dimension": "NodeDim", + "edge_dimension": "EdgeDim", + "edge_coords_and_axes": ((cls.EDGE_LON, "x"), (cls.EDGE_LAT, "y")), + } + cls.mesh = ugrid.Mesh(**cls.kwargs) def test__metadata_manager(self): self.assertEqual( @@ -108,11 +126,13 @@ def test___repr__(self): self.assertEqual(expected, self.mesh.__repr__()) def test_all_connectivities(self): - expected = ugrid.Mesh1DConnectivities(EDGE_NODE) + expected = ugrid.Mesh1DConnectivities(self.EDGE_NODE) self.assertEqual(expected, self.mesh.all_connectivities) def test_all_coords(self): - expected = ugrid.Mesh1DCoords(NODE_LON, NODE_LAT, EDGE_LON, EDGE_LAT) + expected = ugrid.Mesh1DCoords( + self.NODE_LON, self.NODE_LAT, self.EDGE_LON, self.EDGE_LAT + ) self.assertEqual(expected, self.mesh.all_coords) def test_boundary_node(self): @@ -127,7 +147,7 @@ def test_cf_role(self): def test_connectivities(self): # General results. 
Method intended for inheritance. positive_kwargs = ( - {"item": EDGE_NODE}, + {"item": self.EDGE_NODE}, {"item": "long_name"}, {"long_name": "long_name"}, {"var_name": "var_name"}, @@ -151,7 +171,7 @@ def test_connectivities(self): func = self.mesh.connectivities for kwargs in positive_kwargs: self.assertEqual( - EDGE_NODE, func(**kwargs)["edge_node_connectivity"] + self.EDGE_NODE, func(**kwargs)["edge_node_connectivity"] ) for kwargs in negative_kwargs: self.assertNotIn("edge_node_connectivity", func(**kwargs)) @@ -170,7 +190,7 @@ def test_connectivities_locations(self): {"edge": False, "node": False}, ) - expected = {EDGE_NODE.cf_role: EDGE_NODE} + expected = {self.EDGE_NODE.cf_role: self.EDGE_NODE} func = self.mesh.connectivities for kwargs in positive_kwargs: self.assertEqual(expected, func(**kwargs)) @@ -191,7 +211,7 @@ def test_coord(self): def test_coords(self): # General results. Method intended for inheritance. positive_kwargs = ( - {"item": NODE_LON}, + {"item": self.NODE_LON}, {"item": "longitude"}, {"standard_name": "longitude"}, {"long_name": "long_name"}, @@ -211,17 +231,17 @@ def test_coords(self): func = self.mesh.coords for kwargs in positive_kwargs: - self.assertEqual(NODE_LON, func(**kwargs)["node_x"]) + self.assertEqual(self.NODE_LON, func(**kwargs)["node_x"]) for kwargs in negative_kwargs: self.assertNotIn("node_x", func(**kwargs)) def test_coords_locations(self): # topology_dimension-specific results. Method intended to be overridden. all_expected = { - "node_x": NODE_LON, - "node_y": NODE_LAT, - "edge_x": EDGE_LON, - "edge_y": EDGE_LAT, + "node_x": self.NODE_LON, + "node_y": self.NODE_LAT, + "edge_x": self.EDGE_LON, + "edge_y": self.EDGE_LAT, } kwargs_expected = ( @@ -249,11 +269,11 @@ def test_coords_locations(self): def test_edge_dimension(self): self.assertEqual( - self.KWARGS["edge_dimension"], self.mesh.edge_dimension + self.kwargs["edge_dimension"], self.mesh.edge_dimension ) def test_edge_coords(self): - expected = ugrid.MeshEdgeCoords(EDGE_LON, EDGE_LAT) + expected = ugrid.MeshEdgeCoords(self.EDGE_LON, self.EDGE_LAT) self.assertEqual(expected, self.mesh.edge_coords) def test_edge_face(self): @@ -261,7 +281,7 @@ def test_edge_face(self): _ = self.mesh.edge_face_connectivity def test_edge_node(self): - self.assertEqual(EDGE_NODE, self.mesh.edge_node_connectivity) + self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) def test_face_coords(self): with self.assertRaises(AttributeError): @@ -283,17 +303,17 @@ def test_face_node(self): _ = self.mesh.face_node_connectivity def test_node_coords(self): - expected = ugrid.MeshNodeCoords(NODE_LON, NODE_LAT) + expected = ugrid.MeshNodeCoords(self.NODE_LON, self.NODE_LAT) self.assertEqual(expected, self.mesh.node_coords) def test_node_dimension(self): self.assertEqual( - self.KWARGS["node_dimension"], self.mesh.node_dimension + self.kwargs["node_dimension"], self.mesh.node_dimension ) def test_topology_dimension(self): self.assertEqual( - self.KWARGS["topology_dimension"], self.mesh.topology_dimension + self.kwargs["topology_dimension"], self.mesh.topology_dimension ) # Read only. self.assertRaises( @@ -305,18 +325,22 @@ class TestProperties2D(TestProperties1D): # Additional/specialised tests for topology_dimension=2. 
@classmethod def setUpClass(cls): - cls.KWARGS["topology_dimension"] = 2 - cls.KWARGS["connectivities"] = ( - FACE_NODE, - EDGE_NODE, - FACE_EDGE, - FACE_FACE, - EDGE_FACE, - BOUNDARY_NODE, - ) - cls.KWARGS["face_dimension"] = "FaceDim" - cls.KWARGS["face_coords_and_axes"] = ((FACE_LON, "x"), (FACE_LAT, "y")) super().setUpClass() + cls.kwargs["topology_dimension"] = 2 + cls.kwargs["connectivities"] = ( + cls.FACE_NODE, + cls.EDGE_NODE, + cls.FACE_EDGE, + cls.FACE_FACE, + cls.EDGE_FACE, + cls.BOUNDARY_NODE, + ) + cls.kwargs["face_dimension"] = "FaceDim" + cls.kwargs["face_coords_and_axes"] = ( + (cls.FACE_LON, "x"), + (cls.FACE_LAT, "y"), + ) + cls.mesh = ugrid.Mesh(**cls.kwargs) def test___repr__(self): expected = ( @@ -349,23 +373,30 @@ def test___repr__(self): def test_all_connectivities(self): expected = ugrid.Mesh2DConnectivities( - FACE_NODE, - EDGE_NODE, - FACE_EDGE, - FACE_FACE, - EDGE_FACE, - BOUNDARY_NODE, + self.FACE_NODE, + self.EDGE_NODE, + self.FACE_EDGE, + self.FACE_FACE, + self.EDGE_FACE, + self.BOUNDARY_NODE, ) self.assertEqual(expected, self.mesh.all_connectivities) def test_all_coords(self): expected = ugrid.Mesh2DCoords( - NODE_LON, NODE_LAT, EDGE_LON, EDGE_LAT, FACE_LON, FACE_LAT + self.NODE_LON, + self.NODE_LAT, + self.EDGE_LON, + self.EDGE_LAT, + self.FACE_LON, + self.FACE_LAT, ) self.assertEqual(expected, self.mesh.all_coords) def test_boundary_node(self): - self.assertEqual(BOUNDARY_NODE, self.mesh.boundary_node_connectivity) + self.assertEqual( + self.BOUNDARY_NODE, self.mesh.boundary_node_connectivity + ) def test_connectivity(self): # See Mesh.connectivities tests for thorough coverage of cases. @@ -384,15 +415,35 @@ def test_connectivity(self): def test_connectivities_locations(self): kwargs_expected = ( - ({"node": True}, (EDGE_NODE, FACE_NODE, BOUNDARY_NODE)), - ({"edge": True}, (EDGE_NODE, FACE_EDGE, EDGE_FACE)), - ({"face": True}, (FACE_NODE, FACE_EDGE, FACE_FACE, EDGE_FACE)), - ({"node": False}, (FACE_EDGE, EDGE_FACE, FACE_FACE)), - ({"edge": False}, (FACE_NODE, BOUNDARY_NODE, FACE_FACE)), - ({"face": False}, (EDGE_NODE, BOUNDARY_NODE)), - ({"edge": True, "face": True}, (FACE_EDGE, EDGE_FACE)), - ({"node": False, "edge": False}, (FACE_FACE,)), - ({"node": True, "edge": False}, (FACE_NODE, BOUNDARY_NODE)), + ( + {"node": True}, + (self.EDGE_NODE, self.FACE_NODE, self.BOUNDARY_NODE), + ), + ({"edge": True}, (self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE)), + ( + {"face": True}, + ( + self.FACE_NODE, + self.FACE_EDGE, + self.FACE_FACE, + self.EDGE_FACE, + ), + ), + ( + {"node": False}, + (self.FACE_EDGE, self.EDGE_FACE, self.FACE_FACE), + ), + ( + {"edge": False}, + (self.FACE_NODE, self.BOUNDARY_NODE, self.FACE_FACE), + ), + ({"face": False}, (self.EDGE_NODE, self.BOUNDARY_NODE)), + ({"edge": True, "face": True}, (self.FACE_EDGE, self.EDGE_FACE)), + ({"node": False, "edge": False}, (self.FACE_FACE,)), + ( + {"node": True, "edge": False}, + (self.FACE_NODE, self.BOUNDARY_NODE), + ), ({"node": False, "edge": False, "face": False}, []), ) func = self.mesh.connectivities @@ -402,12 +453,12 @@ def test_connectivities_locations(self): def test_coords_locations(self): all_expected = { - "node_x": NODE_LON, - "node_y": NODE_LAT, - "edge_x": EDGE_LON, - "edge_y": EDGE_LAT, - "face_x": FACE_LON, - "face_y": FACE_LAT, + "node_x": self.NODE_LON, + "node_y": self.NODE_LAT, + "edge_x": self.EDGE_LON, + "edge_y": self.EDGE_LAT, + "face_x": self.FACE_LON, + "face_y": self.FACE_LAT, } kwargs_expected = ( @@ -431,35 +482,35 @@ def test_coords_locations(self): 
self.assertEqual(expected, func(**kwargs)) def test_edge_face(self): - self.assertEqual(EDGE_FACE, self.mesh.edge_face_connectivity) + self.assertEqual(self.EDGE_FACE, self.mesh.edge_face_connectivity) def test_face_coords(self): - expected = ugrid.MeshFaceCoords(FACE_LON, FACE_LAT) + expected = ugrid.MeshFaceCoords(self.FACE_LON, self.FACE_LAT) self.assertEqual(expected, self.mesh.face_coords) def test_face_dimension(self): self.assertEqual( - self.KWARGS["face_dimension"], self.mesh.face_dimension + self.kwargs["face_dimension"], self.mesh.face_dimension ) def test_face_edge(self): - self.assertEqual(FACE_EDGE, self.mesh.face_edge_connectivity) + self.assertEqual(self.FACE_EDGE, self.mesh.face_edge_connectivity) def test_face_face(self): - self.assertEqual(FACE_FACE, self.mesh.face_face_connectivity) + self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) def test_face_node(self): - self.assertEqual(FACE_NODE, self.mesh.face_node_connectivity) + self.assertEqual(self.FACE_NODE, self.mesh.face_node_connectivity) -class TestOperations1D(tests.IrisTest): +class TestOperations1D(TestMeshCommon): # Tests that cannot re-use an existing Mesh instance, instead need a new # one each time. def setUp(self): self.mesh = ugrid.Mesh( topology_dimension=1, - node_coords_and_axes=((NODE_LON, "x"), (NODE_LAT, "y")), - connectivities=EDGE_NODE, + node_coords_and_axes=((self.NODE_LON, "x"), (self.NODE_LAT, "y")), + connectivities=self.EDGE_NODE, ) @staticmethod @@ -505,7 +556,7 @@ def test_add_connectivities(self): for new_len in (False, True): # REPLACE connectivities, first with one of the same length, then # with one of different length. - edge_node = self.new_connectivity(EDGE_NODE, new_len) + edge_node = self.new_connectivity(self.EDGE_NODE, new_len) self.mesh.add_connectivities(edge_node) self.assertEqual( ugrid.Mesh1DConnectivities(edge_node), @@ -513,8 +564,8 @@ def test_add_connectivities(self): ) def test_add_connectivities_duplicates(self): - edge_node_one = EDGE_NODE - edge_node_two = self.new_connectivity(EDGE_NODE) + edge_node_one = self.EDGE_NODE + edge_node_two = self.new_connectivity(self.EDGE_NODE) self.mesh.add_connectivities(edge_node_one, edge_node_two) self.assertEqual( edge_node_two, @@ -529,14 +580,14 @@ def test_add_connectivities_invalid(self): "foo", ) - face_node = FACE_NODE + face_node = self.FACE_NODE with self.assertLogs(ugrid.logger, level="DEBUG") as log: self.mesh.add_connectivities(face_node) self.assertIn("Not adding connectivity", log.output[0]) def test_add_coords(self): # ADD coords. - edge_kwargs = {"edge_x": EDGE_LON, "edge_y": EDGE_LAT} + edge_kwargs = {"edge_x": self.EDGE_LON, "edge_y": self.EDGE_LAT} self.mesh.add_coords(**edge_kwargs) self.assertEqual( ugrid.MeshEdgeCoords(**edge_kwargs), self.mesh.edge_coords @@ -546,12 +597,12 @@ def test_add_coords(self): # REPLACE coords, first with ones of the same shape, then with ones # of different shape. 
node_kwargs = { - "node_x": self.new_coord(NODE_LON, new_shape), - "node_y": self.new_coord(NODE_LAT, new_shape), + "node_x": self.new_coord(self.NODE_LON, new_shape), + "node_y": self.new_coord(self.NODE_LAT, new_shape), } edge_kwargs = { - "edge_x": self.new_coord(EDGE_LON, new_shape), - "edge_y": self.new_coord(EDGE_LAT, new_shape), + "edge_x": self.new_coord(self.EDGE_LON, new_shape), + "edge_y": self.new_coord(self.EDGE_LAT, new_shape), } self.mesh.add_coords(**node_kwargs, **edge_kwargs) self.assertEqual( @@ -563,7 +614,10 @@ def test_add_coords(self): def test_add_coords_face(self): self.assertRaises( - TypeError, self.mesh.add_coords, face_x=FACE_LON, face_y=FACE_LAT + TypeError, + self.mesh.add_coords, + face_x=self.FACE_LON, + face_y=self.FACE_LAT, ) def test_add_coords_invalid(self): @@ -572,7 +626,7 @@ def test_add_coords_invalid(self): TypeError, ".*requires to be an 'AuxCoord'.*", func, node_x="foo" ) self.assertRaisesRegex( - TypeError, ".*requires a x-axis like.*", func, node_x=NODE_LAT + TypeError, ".*requires a x-axis like.*", func, node_x=self.NODE_LAT ) climatological = AuxCoord( [0], @@ -587,21 +641,21 @@ def test_add_coords_invalid(self): func, node_x=climatological, ) - wrong_shape = NODE_LON.copy([0]) + wrong_shape = self.NODE_LON.copy([0]) self.assertRaisesRegex( ValueError, ".*requires to have shape.*", func, node_x=wrong_shape ) def test_add_coords_single(self): # ADD coord. - edge_x = EDGE_LON + edge_x = self.EDGE_LON expected = ugrid.MeshEdgeCoords(edge_x=edge_x, edge_y=None) self.mesh.add_coords(edge_x=edge_x) self.assertEqual(expected, self.mesh.edge_coords) # REPLACE coords. - node_x = self.new_coord(NODE_LON) - edge_x = self.new_coord(EDGE_LON) + node_x = self.new_coord(self.NODE_LON) + edge_x = self.new_coord(self.EDGE_LON) expected_nodes = ugrid.MeshNodeCoords( node_x=node_x, node_y=self.mesh.node_coords.node_y ) @@ -611,8 +665,8 @@ def test_add_coords_single(self): self.assertEqual(expected_edges, self.mesh.edge_coords) # Attempt to REPLACE coords with those of DIFFERENT SHAPE. - node_x = self.new_coord(NODE_LON, new_shape=True) - edge_x = self.new_coord(EDGE_LON, new_shape=True) + node_x = self.new_coord(self.NODE_LON, new_shape=True) + edge_x = self.new_coord(self.EDGE_LON, new_shape=True) node_kwarg = {"node_x": node_x} edge_kwarg = {"edge_x": edge_x} both_kwargs = dict(**node_kwarg, **edge_kwarg) @@ -625,7 +679,9 @@ def test_add_coords_single(self): ) def test_add_coords_single_face(self): - self.assertRaises(TypeError, self.mesh.add_coords, face_x=FACE_LON) + self.assertRaises( + TypeError, self.mesh.add_coords, face_x=self.FACE_LON + ) def test_dimension_names(self): # Test defaults. @@ -669,7 +725,7 @@ def test_remove_connectivities(self): """ positive_kwargs = ( - {"item": EDGE_NODE}, + {"item": self.EDGE_NODE}, {"item": "long_name"}, {"long_name": "long_name"}, {"var_name": "var_name"}, @@ -701,20 +757,20 @@ def test_remove_connectivities(self): with self.assertLogs(ugrid.logger, level="DEBUG") as log: self.mesh.remove_connectivities(**kwargs) self.assertIn("Ignoring request to remove", log.output[0]) - self.assertEqual(EDGE_NODE, self.mesh.edge_node_connectivity) + self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) for kwargs in negative_kwargs: with self.assertLogs(ugrid.logger, level="DEBUG") as log: # Check that the only debug log is the one we inserted. 
ugrid.logger.debug("foo") self.mesh.remove_connectivities(**kwargs) self.assertEqual(1, len(log.output)) - self.assertEqual(EDGE_NODE, self.mesh.edge_node_connectivity) + self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) def test_remove_coords(self): # Test that remove() mimics the coords() method correctly, # and prevents removal of mandatory coords. positive_kwargs = ( - {"item": NODE_LON}, + {"item": self.NODE_LON}, {"item": "longitude"}, {"standard_name": "longitude"}, {"long_name": "long_name"}, @@ -736,23 +792,23 @@ def test_remove_coords(self): with self.assertLogs(ugrid.logger, level="DEBUG") as log: self.mesh.remove_coords(**kwargs) self.assertIn("Ignoring request to remove", log.output[0]) - self.assertEqual(NODE_LON, self.mesh.node_coords.node_x) + self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) for kwargs in negative_kwargs: with self.assertLogs(ugrid.logger, level="DEBUG") as log: # Check that the only debug log is the one we inserted. ugrid.logger.debug("foo") self.mesh.remove_coords(**kwargs) self.assertEqual(1, len(log.output)) - self.assertEqual(NODE_LON, self.mesh.node_coords.node_x) + self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) # Test removal of optional connectivity. - self.mesh.add_coords(edge_x=EDGE_LON) + self.mesh.add_coords(edge_x=self.EDGE_LON) # Attempt to remove a non-existent coord. - self.mesh.remove_coords(EDGE_LAT) + self.mesh.remove_coords(self.EDGE_LAT) # Confirm that EDGE_LON is still there. - self.assertEqual(EDGE_LON, self.mesh.edge_coords.edge_x) + self.assertEqual(self.EDGE_LON, self.mesh.edge_coords.edge_x) # Remove EDGE_LON and confirm success. - self.mesh.remove_coords(EDGE_LON) + self.mesh.remove_coords(self.EDGE_LON) self.assertEqual(None, self.mesh.edge_coords.edge_x) @@ -761,18 +817,18 @@ class TestOperations2D(TestOperations1D): def setUp(self): self.mesh = ugrid.Mesh( topology_dimension=2, - node_coords_and_axes=((NODE_LON, "x"), (NODE_LAT, "y")), - connectivities=(FACE_NODE), + node_coords_and_axes=((self.NODE_LON, "x"), (self.NODE_LAT, "y")), + connectivities=(self.FACE_NODE), ) def test_add_connectivities(self): # ADD connectivities. kwargs = { - "edge_node": EDGE_NODE, - "face_edge": FACE_EDGE, - "face_face": FACE_FACE, - "edge_face": EDGE_FACE, - "boundary_node": BOUNDARY_NODE, + "edge_node": self.EDGE_NODE, + "face_edge": self.FACE_EDGE, + "face_face": self.FACE_FACE, + "edge_face": self.EDGE_FACE, + "boundary_node": self.BOUNDARY_NODE, } expected = ugrid.Mesh2DConnectivities( face_node=self.mesh.face_node_connectivity, **kwargs @@ -781,7 +837,7 @@ def test_add_connectivities(self): self.assertEqual(expected, self.mesh.all_connectivities) # REPLACE connectivities. - kwargs["face_node"] = FACE_NODE + kwargs["face_node"] = self.FACE_NODE for new_len in (False, True): # First replace with ones of same length, then with ones of # different length. @@ -796,9 +852,9 @@ def test_add_connectivities(self): def test_add_connectivities_inconsistent(self): # ADD Connectivities. 
- self.mesh.add_connectivities(EDGE_NODE) - face_edge = self.new_connectivity(FACE_EDGE, new_len=True) - edge_face = self.new_connectivity(EDGE_FACE, new_len=True) + self.mesh.add_connectivities(self.EDGE_NODE) + face_edge = self.new_connectivity(self.FACE_EDGE, new_len=True) + edge_face = self.new_connectivity(self.EDGE_FACE, new_len=True) for args in ([face_edge], [edge_face], [face_edge, edge_face]): self.assertRaisesRegex( ValueError, @@ -808,7 +864,7 @@ def test_add_connectivities_inconsistent(self): ) # REPLACE Connectivities - self.mesh.add_connectivities(FACE_EDGE, EDGE_FACE) + self.mesh.add_connectivities(self.FACE_EDGE, self.EDGE_FACE) for args in ([face_edge], [edge_face], [face_edge, edge_face]): self.assertRaisesRegex( ValueError, @@ -827,7 +883,7 @@ def test_add_connectivities_invalid(self): def test_add_coords_face(self): # ADD coords. - kwargs = {"face_x": FACE_LON, "face_y": FACE_LAT} + kwargs = {"face_x": self.FACE_LON, "face_y": self.FACE_LAT} self.mesh.add_coords(**kwargs) self.assertEqual(ugrid.MeshFaceCoords(**kwargs), self.mesh.face_coords) @@ -835,8 +891,8 @@ def test_add_coords_face(self): # REPLACE coords, first with ones of the same shape, then with ones # of different shape. kwargs = { - "face_x": self.new_coord(FACE_LON, new_shape), - "face_y": self.new_coord(FACE_LAT, new_shape), + "face_x": self.new_coord(self.FACE_LON, new_shape), + "face_y": self.new_coord(self.FACE_LAT, new_shape), } self.mesh.add_coords(**kwargs) self.assertEqual( @@ -845,19 +901,19 @@ def test_add_coords_face(self): def test_add_coords_single_face(self): # ADD coord. - face_x = FACE_LON + face_x = self.FACE_LON expected = ugrid.MeshFaceCoords(face_x=face_x, face_y=None) self.mesh.add_coords(face_x=face_x) self.assertEqual(expected, self.mesh.face_coords) # REPLACE coord. - face_x = self.new_coord(FACE_LON) + face_x = self.new_coord(self.FACE_LON) expected = ugrid.MeshFaceCoords(face_x=face_x, face_y=None) self.mesh.add_coords(face_x=face_x) self.assertEqual(expected, self.mesh.face_coords) # Attempt to REPLACE coord with that of DIFFERENT SHAPE. - face_x = self.new_coord(FACE_LON, new_shape=True) + face_x = self.new_coord(self.FACE_LON, new_shape=True) self.assertRaisesRegex( ValueError, ".*requires to have shape.*", @@ -894,11 +950,11 @@ def test_remove_connectivities(self): """Do what 1D test could not - test removal of optional connectivity.""" # Add an optional connectivity. - self.mesh.add_connectivities(FACE_FACE) + self.mesh.add_connectivities(self.FACE_FACE) # Attempt to remove a non-existent connectivity. - self.mesh.remove_connectivities(EDGE_NODE) + self.mesh.remove_connectivities(self.EDGE_NODE) # Confirm that FACE_FACE is still there. - self.assertEqual(FACE_FACE, self.mesh.face_face_connectivity) + self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) # Remove FACE_FACE and confirm success. 
self.mesh.remove_connectivities(face=True) self.assertEqual(None, self.mesh.face_face_connectivity) @@ -906,18 +962,21 @@ def test_remove_connectivities(self): def test_remove_coords(self): """Test the face argument.""" super().test_remove_coords() - self.mesh.add_coords(face_x=FACE_LON) - self.assertEqual(FACE_LON, self.mesh.face_coords.face_x) + self.mesh.add_coords(face_x=self.FACE_LON) + self.assertEqual(self.FACE_LON, self.mesh.face_coords.face_x) self.mesh.remove_coords(face=True) self.assertEqual(None, self.mesh.face_coords.face_x) -class InitValidation(tests.IrisTest): +class InitValidation(TestMeshCommon): def test_invalid_topology(self): kwargs = { "topology_dimension": 0, - "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), - "connectivities": EDGE_NODE, + "node_coords_and_axes": ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ), + "connectivities": self.EDGE_NODE, } self.assertRaisesRegex( ValueError, "Expected 'topology_dimension'.*", ugrid.Mesh, **kwargs @@ -926,28 +985,34 @@ def test_invalid_topology(self): def test_invalid_axes(self): kwargs = { "topology_dimension": 2, - "connectivities": FACE_NODE, + "connectivities": self.FACE_NODE, } self.assertRaisesRegex( ValueError, "Invalid axis specified for node.*", ugrid.Mesh, - node_coords_and_axes=((NODE_LON, "foo"), (NODE_LAT, "y")), + node_coords_and_axes=( + (self.NODE_LON, "foo"), + (self.NODE_LAT, "y"), + ), **kwargs, ) - kwargs["node_coords_and_axes"] = ((NODE_LON, "x"), (NODE_LAT, "y")) + kwargs["node_coords_and_axes"] = ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ) self.assertRaisesRegex( ValueError, "Invalid axis specified for edge.*", ugrid.Mesh, - edge_coords_and_axes=((EDGE_LON, "foo"),), + edge_coords_and_axes=((self.EDGE_LON, "foo"),), **kwargs, ) self.assertRaisesRegex( ValueError, "Invalid axis specified for face.*", ugrid.Mesh, - face_coords_and_axes=((FACE_LON, "foo"),), + face_coords_and_axes=((self.FACE_LON, "foo"),), **kwargs, ) @@ -957,8 +1022,11 @@ def test_minimum_connectivities(self): # Further validations are tested in add_connectivity tests. kwargs = { "topology_dimension": 1, - "node_coords_and_axes": ((NODE_LON, "x"), (NODE_LAT, "y")), - "connectivities": (FACE_NODE,), + "node_coords_and_axes": ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ), + "connectivities": (self.FACE_NODE,), } self.assertRaisesRegex( ValueError, @@ -971,8 +1039,8 @@ def test_minimum_coords(self): # Further validations are tested in add_coord tests. kwargs = { "topology_dimension": 1, - "node_coords_and_axes": ((NODE_LON, "x"), (None, "y")), - "connectivities": (FACE_NODE,), + "node_coords_and_axes": ((self.NODE_LON, "x"), (None, "y")), + "connectivities": (self.FACE_NODE,), } self.assertRaisesRegex( ValueError, ".*is a required coordinate.*", ugrid.Mesh, **kwargs From 447833d2f6d4cd134c2b95de9b74ded4596113ed Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 11:34:42 +0000 Subject: [PATCH 20/22] Delete commented code. --- lib/iris/tests/unit/experimental/ugrid/test_Mesh.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 69d3857d4f..748ce0c82d 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -19,10 +19,8 @@ class TestMeshCommon(tests.IrisTest): @classmethod def setUpClass(cls): - # A collection of minimal coords and connectivities describing an equilateral triangle. 
- # Re-used in most/all of the test classes, hence globals. - # global NODE_LON, NODE_LAT, EDGE_LON, EDGE_LAT, FACE_LON, FACE_LAT, EDGE_NODE, FACE_NODE, FACE_EDGE, FACE_FACE, EDGE_FACE, BOUNDARY_NODE - + # A collection of minimal coords and connectivities describing an + # equilateral triangle. cls.NODE_LON = AuxCoord( [0, 2, 1], standard_name="longitude", From acdf1888ecab38bb7b00260419f220d572aeaffa Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 14:49:55 +0000 Subject: [PATCH 21/22] Mesh clearer distinction between coords and connectivities filters. --- lib/iris/experimental/ugrid.py | 149 +++++++++--------- .../unit/experimental/ugrid/test_Mesh.py | 140 ++++++++++------ 2 files changed, 166 insertions(+), 123 deletions(-) diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index f95c4451af..86c80bb2c1 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -1245,9 +1245,9 @@ def connectivities( var_name=None, attributes=None, cf_role=None, - node=None, - edge=None, - face=None, + contains_node=None, + contains_edge=None, + contains_face=None, ): return self._connectivity_manager.filters( item=item, @@ -1256,9 +1256,9 @@ def connectivities( var_name=var_name, attributes=attributes, cf_role=cf_role, - node=node, - edge=edge, - face=face, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, ) def connectivity( @@ -1269,9 +1269,9 @@ def connectivity( var_name=None, attributes=None, cf_role=None, - node=None, - edge=None, - face=None, + contains_node=None, + contains_edge=None, + contains_face=None, ): return self._connectivity_manager.filter( item=item, @@ -1280,9 +1280,9 @@ def connectivity( var_name=var_name, attributes=attributes, cf_role=cf_role, - node=node, - edge=edge, - face=face, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, ) def coord( @@ -1293,9 +1293,9 @@ def coord( var_name=None, attributes=None, axis=None, - node=None, - edge=None, - face=None, + include_nodes=None, + include_edges=None, + include_faces=None, ): return self._coord_manager.filter( item=item, @@ -1304,9 +1304,9 @@ def coord( var_name=var_name, attributes=attributes, axis=axis, - node=node, - edge=edge, - face=face, + include_nodes=include_nodes, + include_edges=include_edges, + include_faces=include_faces, ) def coords( @@ -1317,9 +1317,9 @@ def coords( var_name=None, attributes=None, axis=None, - node=None, - edge=None, - face=None, + include_nodes=None, + include_edges=None, + include_faces=None, ): return self._coord_manager.filters( item=item, @@ -1328,9 +1328,9 @@ def coords( var_name=var_name, attributes=attributes, axis=axis, - node=node, - edge=edge, - face=face, + include_nodes=include_nodes, + include_edges=include_edges, + include_faces=include_faces, ) def remove_connectivities( @@ -1341,9 +1341,9 @@ def remove_connectivities( var_name=None, attributes=None, cf_role=None, - node=None, - edge=None, - face=None, + contains_node=None, + contains_edge=None, + contains_face=None, ): return self._connectivity_manager.remove( item=item, @@ -1352,9 +1352,9 @@ def remove_connectivities( var_name=var_name, attributes=attributes, cf_role=cf_role, - node=node, - edge=edge, - face=face, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, ) def remove_coords( @@ -1365,9 +1365,9 @@ def remove_coords( var_name=None, attributes=None, axis=None, - node=None, - edge=None, - face=None, + include_nodes=None, + 
include_edges=None, + include_faces=None, ): # Filter out absent arguments - only expecting face coords sometimes, # same will be true of volumes in future. @@ -1378,9 +1378,9 @@ def remove_coords( "var_name": var_name, "attributes": attributes, "axis": axis, - "node": node, - "edge": edge, - "face": face, + "include_nodes": include_nodes, + "include_edges": include_edges, + "include_faces": include_faces, } kwargs = {k: v for k, v in kwargs.items() if v} @@ -1682,33 +1682,32 @@ def filters( var_name=None, attributes=None, axis=None, - node=None, - edge=None, - face=None, + include_nodes=None, + include_edges=None, + include_faces=None, ): # TBD: support coord_systems? - face_requested = face is True - args = [node, edge, face] - true_count = len([arg for arg in args if arg]) - if true_count > 1: - # Standard filter behaviour is 'AND', and coord locations are - # mutually exclusive, so multiple True cannot return any results. - node = edge = face = False - elif true_count == 0: - # Treat None as True in this case. - node, edge, face = [True if arg is None else arg for arg in args] + # Preserve original argument before modifying. + face_requested = include_faces + + # Rationalise the tri-state behaviour. + args = [include_nodes, include_edges, include_faces] + state = not any(set(filter(lambda arg: arg is not None, args))) + include_nodes, include_edges, include_faces = map( + lambda arg: arg if arg is not None else state, args + ) def populated_coords(coords_tuple): return list(filter(None, list(coords_tuple))) members = [] - if node: + if include_nodes: members += populated_coords(self.node_coords) - if edge: + if include_edges: members += populated_coords(self.edge_coords) if hasattr(self, "face_coords"): - if face: + if include_faces: members += populated_coords(self.face_coords) elif face_requested: dmsg = "Ignoring request to filter non-existent 'face_coords'" @@ -1739,8 +1738,8 @@ def remove( var_name=None, attributes=None, axis=None, - node=None, - edge=None, + include_nodes=None, + include_edges=None, ): return self._remove( item=item, @@ -1749,8 +1748,8 @@ def remove( var_name=var_name, attributes=attributes, axis=axis, - node=node, - edge=edge, + include_nodes=include_nodes, + include_edges=include_edges, ) @@ -1829,9 +1828,9 @@ def remove( var_name=None, attributes=None, axis=None, - node=None, - edge=None, - face=None, + include_nodes=None, + include_edges=None, + include_faces=None, ): return self._remove( item=item, @@ -1840,9 +1839,9 @@ def remove( var_name=var_name, attributes=attributes, axis=axis, - node=node, - edge=edge, - face=face, + include_nodes=include_nodes, + include_edges=include_edges, + include_faces=include_faces, ) @@ -1987,9 +1986,9 @@ def filters( var_name=None, attributes=None, cf_role=None, - node=None, - edge=None, - face=None, + contains_node=None, + contains_edge=None, + contains_face=None, ): members = [c for c in self._members.values() if c is not None] @@ -2020,16 +2019,16 @@ def location_filter(instances, loc_arg, loc_name): return filtered for arg, loc in ( - (node, "node"), - (edge, "edge"), - (face, "face"), + (contains_node, "node"), + (contains_edge, "edge"), + (contains_face, "face"), ): members = location_filter(members, arg, loc) # No need to actually modify filtering behaviour - already won't return # any face cf-roles if none are present. 
supports_faces = any(["face" in role for role in self.ALL]) - if face and not supports_faces: + if contains_face and not supports_faces: message = ( "Ignoring request to filter for non-existent 'face' cf-roles." ) @@ -2059,9 +2058,9 @@ def remove( var_name=None, attributes=None, cf_role=None, - node=None, - edge=None, - face=None, + contains_node=None, + contains_edge=None, + contains_face=None, ): removal_dict = self.filters( item=item, @@ -2070,9 +2069,9 @@ def remove( var_name=var_name, attributes=attributes, cf_role=cf_role, - node=node, - edge=edge, - face=face, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, ) for cf_role in self.REQUIRED: excluded = removal_dict.pop(cf_role, None) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 748ce0c82d..076e6da46a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -177,15 +177,15 @@ def test_connectivities(self): def test_connectivities_locations(self): # topology_dimension-specific results. Method intended to be overridden. positive_kwargs = ( - {"node": True}, - {"edge": True}, - {"node": True, "edge": True}, + {"contains_node": True}, + {"contains_edge": True}, + {"contains_node": True, "contains_edge": True}, ) negative_kwargs = ( - {"node": False}, - {"edge": False}, - {"edge": True, "node": False}, - {"edge": False, "node": False}, + {"contains_node": False}, + {"contains_edge": False}, + {"contains_edge": True, "contains_node": False}, + {"contains_edge": False, "contains_node": False}, ) expected = {self.EDGE_NODE.cf_role: self.EDGE_NODE} @@ -196,14 +196,16 @@ def test_connectivities_locations(self): self.assertEqual({}, func(**kwargs)) with self.assertLogs(ugrid.logger, level="DEBUG") as log: - self.assertEqual({}, func(face=True)) + self.assertEqual({}, func(contains_face=True)) self.assertIn("filter for non-existent", log.output[0]) def test_coord(self): # See Mesh.coords tests for thorough coverage of cases. 
func = self.mesh.coord exception = CoordinateNotFoundError - self.assertRaisesRegex(exception, ".*but found 2", func, node=True) + self.assertRaisesRegex( + exception, ".*but found 2", func, include_nodes=True + ) self.assertRaisesRegex(exception, ".*but found none", func, axis="t") def test_coords(self): @@ -245,13 +247,19 @@ def test_coords_locations(self): kwargs_expected = ( ({"axis": "x"}, ("node_x", "edge_x")), ({"axis": "y"}, ("node_y", "edge_y")), - ({"node": True}, ("node_x", "node_y")), - ({"edge": True}, ("edge_x", "edge_y")), - ({"node": False}, ("edge_x", "edge_y")), - ({"edge": False}, ("node_x", "node_y")), - ({"node": True, "edge": True}, []), - ({"node": False, "edge": False}, []), - ({"node": False, "edge": True}, ("edge_x", "edge_y")), + ({"include_nodes": True}, ("node_x", "node_y")), + ({"include_edges": True}, ("edge_x", "edge_y")), + ({"include_nodes": False}, ("edge_x", "edge_y")), + ({"include_edges": False}, ("node_x", "node_y")), + ( + {"include_nodes": True, "include_edges": True}, + ["node_x", "node_y", "edge_x", "edge_y"], + ), + ({"include_nodes": False, "include_edges": False}, []), + ( + {"include_nodes": False, "include_edges": True}, + ("edge_x", "edge_y"), + ), ) func = self.mesh.coords @@ -262,7 +270,7 @@ def test_coords_locations(self): self.assertEqual(expected, func(**kwargs)) with self.assertLogs(ugrid.logger, level="DEBUG") as log: - self.assertEqual({}, func(face=True)) + self.assertEqual({}, func(include_faces=True)) self.assertIn("filter non-existent", log.output[0]) def test_edge_dimension(self): @@ -401,25 +409,30 @@ def test_connectivity(self): # Can only test Mesh.connectivity for 2D since we need >1 connectivity. func = self.mesh.connectivity exception = ConnectivityNotFoundError - self.assertRaisesRegex(exception, ".*but found 3", func, node=True) + self.assertRaisesRegex( + exception, ".*but found 3", func, contains_node=True + ) self.assertRaisesRegex( exception, ".*but found none", func, - node=False, - edge=False, - face=False, + contains_node=False, + contains_edge=False, + contains_face=False, ) def test_connectivities_locations(self): kwargs_expected = ( ( - {"node": True}, + {"contains_node": True}, (self.EDGE_NODE, self.FACE_NODE, self.BOUNDARY_NODE), ), - ({"edge": True}, (self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE)), ( - {"face": True}, + {"contains_edge": True}, + (self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE), + ), + ( + {"contains_face": True}, ( self.FACE_NODE, self.FACE_EDGE, @@ -428,21 +441,34 @@ def test_connectivities_locations(self): ), ), ( - {"node": False}, + {"contains_node": False}, (self.FACE_EDGE, self.EDGE_FACE, self.FACE_FACE), ), ( - {"edge": False}, + {"contains_edge": False}, (self.FACE_NODE, self.BOUNDARY_NODE, self.FACE_FACE), ), - ({"face": False}, (self.EDGE_NODE, self.BOUNDARY_NODE)), - ({"edge": True, "face": True}, (self.FACE_EDGE, self.EDGE_FACE)), - ({"node": False, "edge": False}, (self.FACE_FACE,)), + ({"contains_face": False}, (self.EDGE_NODE, self.BOUNDARY_NODE)), + ( + {"contains_edge": True, "contains_face": True}, + (self.FACE_EDGE, self.EDGE_FACE), + ), + ( + {"contains_node": False, "contains_edge": False}, + (self.FACE_FACE,), + ), ( - {"node": True, "edge": False}, + {"contains_node": True, "contains_edge": False}, (self.FACE_NODE, self.BOUNDARY_NODE), ), - ({"node": False, "edge": False, "face": False}, []), + ( + { + "contains_node": False, + "contains_edge": False, + "contains_face": False, + }, + [], + ), ) func = self.mesh.connectivities for kwargs, expected in kwargs_expected: 
@@ -462,14 +488,32 @@ def test_coords_locations(self): kwargs_expected = ( ({"axis": "x"}, ("node_x", "edge_x", "face_x")), ({"axis": "y"}, ("node_y", "edge_y", "face_y")), - ({"node": True}, ("node_x", "node_y")), - ({"edge": True}, ("edge_x", "edge_y")), - ({"node": False}, ("edge_x", "edge_y", "face_x", "face_y")), - ({"edge": False}, ("node_x", "node_y", "face_x", "face_y")), - ({"face": False}, ("node_x", "node_y", "edge_x", "edge_y")), - ({"face": True, "edge": True}, []), - ({"face": False, "edge": False}, ["node_x", "node_y"]), - ({"face": False, "edge": True}, ["edge_x", "edge_y"]), + ({"include_nodes": True}, ("node_x", "node_y")), + ({"include_edges": True}, ("edge_x", "edge_y")), + ( + {"include_nodes": False}, + ("edge_x", "edge_y", "face_x", "face_y"), + ), + ( + {"include_edges": False}, + ("node_x", "node_y", "face_x", "face_y"), + ), + ( + {"include_faces": False}, + ("node_x", "node_y", "edge_x", "edge_y"), + ), + ( + {"include_faces": True, "include_edges": True}, + ("edge_x", "edge_y", "face_x", "face_y"), + ), + ( + {"include_faces": False, "include_edges": False}, + ("node_x", "node_y"), + ), + ( + {"include_faces": False, "include_edges": True}, + ("edge_x", "edge_y"), + ), ) func = self.mesh.coords @@ -729,9 +773,9 @@ def test_remove_connectivities(self): {"var_name": "var_name"}, {"attributes": {"test": 1}}, {"cf_role": "edge_node_connectivity"}, - {"node": True}, - {"edge": True}, - {"edge": True, "node": True}, + {"contains_node": True}, + {"contains_edge": True}, + {"contains_edge": True, "contains_node": True}, ) fake_connectivity = tests.mock.Mock( @@ -745,10 +789,10 @@ def test_remove_connectivities(self): {"var_name": "foo"}, {"attributes": {"test": 2}}, {"cf_role": "foo"}, - {"node": False}, - {"edge": False}, - {"edge": True, "node": False}, - {"edge": False, "node": False}, + {"contains_node": False}, + {"contains_edge": False}, + {"contains_edge": True, "contains_node": False}, + {"contains_edge": False, "contains_node": False}, ) for kwargs in positive_kwargs: @@ -954,7 +998,7 @@ def test_remove_connectivities(self): # Confirm that FACE_FACE is still there. self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) # Remove FACE_FACE and confirm success. - self.mesh.remove_connectivities(face=True) + self.mesh.remove_connectivities(contains_face=True) self.assertEqual(None, self.mesh.face_face_connectivity) def test_remove_coords(self): @@ -962,7 +1006,7 @@ def test_remove_coords(self): super().test_remove_coords() self.mesh.add_coords(face_x=self.FACE_LON) self.assertEqual(self.FACE_LON, self.mesh.face_coords.face_x) - self.mesh.remove_coords(face=True) + self.mesh.remove_coords(include_faces=True) self.assertEqual(None, self.mesh.face_coords.face_x) From d70ad7a6311f244803cc67a48cb8cbeb774973d6 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Thu, 25 Feb 2021 14:55:25 +0000 Subject: [PATCH 22/22] Mesh tests slight readability improvement. 
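The tweaks below reformat the expected results of the coordinate and connectivity
filter tests that the previous patch reworked. As a rough usage sketch only (not a
test, and assuming nothing beyond the experimental API exercised in test_Mesh.py):
coordinate queries now take the include_nodes/include_edges/include_faces keywords,
while connectivity queries take contains_node/contains_edge/contains_face; the tests
above access the results as dicts keyed by member name and by cf_role respectively.

    from iris.coords import AuxCoord
    from iris.experimental import ugrid

    # Minimal 1-D mesh, reusing the equilateral-triangle fixtures from test_Mesh.py.
    node_lon = AuxCoord([0, 2, 1], standard_name="longitude", var_name="node_lon")
    node_lat = AuxCoord([0, 0, 1], standard_name="latitude", var_name="node_lat")
    edge_node = ugrid.Connectivity(
        [[0, 1], [1, 2], [2, 0]], cf_role="edge_node_connectivity"
    )
    mesh = ugrid.Mesh(
        topology_dimension=1,
        node_coords_and_axes=((node_lon, "x"), (node_lat, "y")),
        connectivities=edge_node,
    )

    # Coordinate filtering: e.g. {"node_x": <AuxCoord>, "node_y": <AuxCoord>}.
    node_coords = mesh.coords(include_nodes=True)

    # Connectivity filtering: e.g. {"edge_node_connectivity": <Connectivity>}.
    edge_conns = mesh.connectivities(contains_edge=True)

    # The singular forms expect exactly one match, raising
    # CoordinateNotFoundError / ConnectivityNotFoundError otherwise.
    node_x = mesh.coord(axis="x")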
--- .../unit/experimental/ugrid/test_Mesh.py | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py index 076e6da46a..2678a24e6e 100644 --- a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -245,12 +245,12 @@ def test_coords_locations(self): } kwargs_expected = ( - ({"axis": "x"}, ("node_x", "edge_x")), - ({"axis": "y"}, ("node_y", "edge_y")), - ({"include_nodes": True}, ("node_x", "node_y")), - ({"include_edges": True}, ("edge_x", "edge_y")), - ({"include_nodes": False}, ("edge_x", "edge_y")), - ({"include_edges": False}, ("node_x", "node_y")), + ({"axis": "x"}, ["node_x", "edge_x"]), + ({"axis": "y"}, ["node_y", "edge_y"]), + ({"include_nodes": True}, ["node_x", "node_y"]), + ({"include_edges": True}, ["edge_x", "edge_y"]), + ({"include_nodes": False}, ["edge_x", "edge_y"]), + ({"include_edges": False}, ["node_x", "node_y"]), ( {"include_nodes": True, "include_edges": True}, ["node_x", "node_y", "edge_x", "edge_y"], @@ -258,7 +258,7 @@ def test_coords_locations(self): ({"include_nodes": False, "include_edges": False}, []), ( {"include_nodes": False, "include_edges": True}, - ("edge_x", "edge_y"), + ["edge_x", "edge_y"], ), ) @@ -425,41 +425,41 @@ def test_connectivities_locations(self): kwargs_expected = ( ( {"contains_node": True}, - (self.EDGE_NODE, self.FACE_NODE, self.BOUNDARY_NODE), + [self.EDGE_NODE, self.FACE_NODE, self.BOUNDARY_NODE], ), ( {"contains_edge": True}, - (self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE), + [self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE], ), ( {"contains_face": True}, - ( + [ self.FACE_NODE, self.FACE_EDGE, self.FACE_FACE, self.EDGE_FACE, - ), + ], ), ( {"contains_node": False}, - (self.FACE_EDGE, self.EDGE_FACE, self.FACE_FACE), + [self.FACE_EDGE, self.EDGE_FACE, self.FACE_FACE], ), ( {"contains_edge": False}, - (self.FACE_NODE, self.BOUNDARY_NODE, self.FACE_FACE), + [self.FACE_NODE, self.BOUNDARY_NODE, self.FACE_FACE], ), - ({"contains_face": False}, (self.EDGE_NODE, self.BOUNDARY_NODE)), + ({"contains_face": False}, [self.EDGE_NODE, self.BOUNDARY_NODE]), ( {"contains_edge": True, "contains_face": True}, - (self.FACE_EDGE, self.EDGE_FACE), + [self.FACE_EDGE, self.EDGE_FACE], ), ( {"contains_node": False, "contains_edge": False}, - (self.FACE_FACE,), + [self.FACE_FACE], ), ( {"contains_node": True, "contains_edge": False}, - (self.FACE_NODE, self.BOUNDARY_NODE), + [self.FACE_NODE, self.BOUNDARY_NODE], ), ( { @@ -486,33 +486,33 @@ def test_coords_locations(self): } kwargs_expected = ( - ({"axis": "x"}, ("node_x", "edge_x", "face_x")), - ({"axis": "y"}, ("node_y", "edge_y", "face_y")), - ({"include_nodes": True}, ("node_x", "node_y")), - ({"include_edges": True}, ("edge_x", "edge_y")), + ({"axis": "x"}, ["node_x", "edge_x", "face_x"]), + ({"axis": "y"}, ["node_y", "edge_y", "face_y"]), + ({"include_nodes": True}, ["node_x", "node_y"]), + ({"include_edges": True}, ["edge_x", "edge_y"]), ( {"include_nodes": False}, - ("edge_x", "edge_y", "face_x", "face_y"), + ["edge_x", "edge_y", "face_x", "face_y"], ), ( {"include_edges": False}, - ("node_x", "node_y", "face_x", "face_y"), + ["node_x", "node_y", "face_x", "face_y"], ), ( {"include_faces": False}, - ("node_x", "node_y", "edge_x", "edge_y"), + ["node_x", "node_y", "edge_x", "edge_y"], ), ( {"include_faces": True, "include_edges": True}, - ("edge_x", "edge_y", "face_x", "face_y"), + ["edge_x", "edge_y", 
"face_x", "face_y"], ), ( {"include_faces": False, "include_edges": False}, - ("node_x", "node_y"), + ["node_x", "node_y"], ), ( {"include_faces": False, "include_edges": True}, - ("edge_x", "edge_y"), + ["edge_x", "edge_y"], ), )